diff --git a/.devcontainer/welcome b/.devcontainer/welcome index 219f04c576..de8a3f1afe 100644 --- a/.devcontainer/welcome +++ b/.devcontainer/welcome @@ -1,9 +1,9 @@ -------------------------------------------------------------------- ******************************************************************** -👋👋👋 Welcome to the Infrahub Demo Codespace environment 👋👋👋 +👋👋👋 Welcome to the Infrahub Demo Codespaces environment 👋👋👋 -The Infrahub API server, Git agents and dependent services have been +The Infrahub API server, Task workers and dependent services have been started. Infrahub can be accessed by opening the ports tab, clicking on the globe icon in the Forwarded Address column of port 8000. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 56053f0d5d..71727c034a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,7 @@ on: branches: - develop - stable + - release-* concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -116,7 +117,7 @@ jobs: with: submodules: true - name: "Setup environment" - run: "pip install ruff==0.6.6" + run: "pip install ruff==0.7.1" - name: "Linting: ruff check" run: "ruff check . --exclude python_sdk" - name: "Linting: ruff format" @@ -734,7 +735,8 @@ jobs: if: | always() && !cancelled() && !contains(needs.*.result, 'failure') && - !contains(needs.*.result, 'cancelled') + !contains(needs.*.result, 'cancelled') && + github.base_ref == 'develop' runs-on: group: huge-runners steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bb94c87dec..f3a8f0b84a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.6.6 + rev: v0.7.1 hooks: # Run the linter. 
- id: ruff diff --git a/.vale/styles/Infrahub/sentence-case.yml b/.vale/styles/Infrahub/sentence-case.yml index 85f5923388..585f6d3a45 100644 --- a/.vale/styles/Infrahub/sentence-case.yml +++ b/.vale/styles/Infrahub/sentence-case.yml @@ -49,6 +49,7 @@ exceptions: - JetStream - Jinja - Jinja2 + - JWT - Namespace - NATS - Node @@ -63,9 +64,11 @@ exceptions: - RFile - SDK - Single sign-on + - SSO - TLS - Tony Stark - TransformPython + - UI - Vale - VS Code - VS Code extensions diff --git a/.vale/styles/spelling-exceptions.txt b/.vale/styles/spelling-exceptions.txt index 7034fa9926..0cebfec559 100644 --- a/.vale/styles/spelling-exceptions.txt +++ b/.vale/styles/spelling-exceptions.txt @@ -4,6 +4,7 @@ APIs artifact_definitions artifact_name async +Authentik boolean check_definitions class_name @@ -81,6 +82,7 @@ Newsfragment Nornir npm o'brian +order_weight openconfig opentelemetry order_by @@ -95,6 +97,7 @@ REST ressources schema_mapping sdk +subcommand subnet template_path toml diff --git a/CHANGELOG.md b/CHANGELOG.md index 5cdc3ac7c9..dd63e7c12f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,64 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang +## [1.0.0](https://github.com/opsmill/infrahub/tree/v1.0.0) - 2024-10-30 + +### Removed + +- Remove previously deprecated GET API endpoint "/api/schema/" ([#3884](https://github.com/opsmill/infrahub/issues/3884)) + +### Deprecated + +- Marked CoreAccount.role as deprecated + Due to the new permissions framework the account roles "admin" / "read-only" / "read-write" are deprecated and will be removed in Infrahub 1.1 + +### Added + +- Reworked branch selector: + - Redesigned the UI + - Added filter for branch + - Improved accessibility & keyboard navigation + - Improved UX on new branch form + - Added quick link to view all branches +- Add support to sign in with OAuth2 and Open ID Connect (OIDC) ([#1568](https://github.com/opsmill/infrahub/issues/1568)) +- Add internal HTTP adapter to 
allow for generic access from Infrahub ([#3302](https://github.com/opsmill/infrahub/issues/3302)) +- Add support to search a node by human friendly ID within a GraphQL query ([#3908](https://github.com/opsmill/infrahub/issues/3908)) +- Added link to our Discord server in the account menu +- Added permissions framework for global and object kind level permissions + + In this first iteration the object permissions are applied to nodes as a whole, in upcoming versions it will be possible to define attribute level permissions as well. +- New permissions system in UI: + - Implemented CRUD views for managing accounts, groups, roles, and permissions + - Updated all components to support new permission system + - Added dynamic message display according to user access levels + +### Fixed + +- The `infrahub-git` agent service has been renamed to `task-worker` in docker compose and the command to start it has been updated as well ([#1075](https://github.com/opsmill/infrahub/issues/1075)) +- Add ability to import repositories with default branch other than 'main' ([#3435](https://github.com/opsmill/infrahub/issues/3435)) +- Disable approve/merge/close buttons for merged Proposed Changes ([#3495](https://github.com/opsmill/infrahub/issues/3495)) +- Fixed regex validation for List type attributes ([#3929](https://github.com/opsmill/infrahub/issues/3929)) +- Allow users to run artifacts and generators on nodes without name attribute ([#4062](https://github.com/opsmill/infrahub/issues/4062)) +- In the schema, properly delete inherited attribute and relationship on Node when the original attribute or relationship are being deleted on the Generic ([#4301](https://github.com/opsmill/infrahub/issues/4301)) +- "Retry All" button for checks is bigger ([#4315](https://github.com/opsmill/infrahub/issues/4315)) +- Add a size restriction on common attribute kinds. 
Only TextArea and JSON support large values ([#4432](https://github.com/opsmill/infrahub/issues/4432)) +- The HFID of a related node is properly returned via GraphQL in all scenarios ([#4482](https://github.com/opsmill/infrahub/issues/4482)) +- Add full validation to BranchMerge and BranchRebase mutations ([#4595](https://github.com/opsmill/infrahub/issues/4595)) +- Report user-friendly error for invalid uniqueness_constraints when loading schemas ([#4677](https://github.com/opsmill/infrahub/issues/4677)) +- Fixed pagination query for nodes with order_by clause using non unique attributes ([#4700](https://github.com/opsmill/infrahub/issues/4700)) +- Fixed schema migration when an attribute previously present on a node is added back ([#4727](https://github.com/opsmill/infrahub/issues/4727)) +- Add order_weight property to multiple attributes and relationships in the demo schema to improve how some models are displayed in the list views +- Changed the Python SDK connection timeout to 60s +- Fix metric missing the query name in Prometheus data +- Fixes an issue where docker compose would output ANSI control characters that don't support it +- Prevent temporary directories generated by Docusaurus to be imported by Docker + +## [0.16.4](https://github.com/opsmill/infrahub/tree/v0.16.4) - 2024-10-17 + +### Fixed + +- Fixed an issue on the UI where a new relationship was being added to the main branch instead of the current branch. ([#4598](https://github.com/opsmill/infrahub/issues/4598)) + ## [0.16.3](https://github.com/opsmill/infrahub/tree/v0.16.3) - 2024-10-10 ### Removed diff --git a/README.md b/README.md index 7388295bc6..c214ecff77 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,6 @@ Infrahub from [OpsMill](https://opsmill.com) is taking a new approach to Infrast ## Quick Start -> [!NOTE] -> Infrahub is currently in beta, and the team is actively working towards reaching version 1.0 by the end of the year. 
The project is committed to ensuring data safety and providing a migration path for future releases. See our [FAQ](https://docs.infrahub.app/faq/) for more information. - Leveraging [GitHub Codespaces](https://docs.github.com/en/codespaces/overview), it's possible to start a new instance of Infrahub in the Cloud in minutes: | No Data | Demo Data | diff --git a/backend/infrahub/api/artifact.py b/backend/infrahub/api/artifact.py index 8d3042e264..123b60b0c0 100644 --- a/backend/infrahub/api/artifact.py +++ b/backend/infrahub/api/artifact.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING - from fastapi import APIRouter, Body, Depends, Request, Response from pydantic import BaseModel, Field @@ -11,11 +9,9 @@ from infrahub.core.protocols import CoreArtifactDefinition from infrahub.database import InfrahubDatabase # noqa: TCH001 from infrahub.exceptions import NodeNotFoundError +from infrahub.git.models import RequestArtifactDefinitionGenerate from infrahub.log import get_logger -from infrahub.message_bus import messages - -if TYPE_CHECKING: - from infrahub.services import InfrahubServices +from infrahub.workflows.catalogue import REQUEST_ARTIFACT_DEFINITION_GENERATE log = get_logger() router = APIRouter(prefix="/artifact") @@ -68,9 +64,9 @@ async def generate_artifact( branch=branch_params.branch, ) - service: InfrahubServices = request.app.state.service - await service.send( - message=messages.RequestArtifactDefinitionGenerate( - artifact_definition=artifact_definition.id, branch=branch_params.branch.name, limit=payload.nodes - ) + service = request.app.state.service + model = RequestArtifactDefinitionGenerate( + artifact_definition=artifact_definition.id, branch=branch_params.branch.name, limit=payload.nodes ) + + await service.workflow.submit_workflow(workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, parameters={"model": model}) diff --git a/backend/infrahub/api/menu.py b/backend/infrahub/api/menu.py index 705d01ed62..20762ad638 
100644 --- a/backend/infrahub/api/menu.py +++ b/backend/infrahub/api/menu.py @@ -9,7 +9,7 @@ from infrahub.core.branch import Branch # noqa: TCH001 from infrahub.core.protocols import CoreMenuItem from infrahub.log import get_logger -from infrahub.menu.generator import generate_menu +from infrahub.menu.generator import generate_restricted_menu from infrahub.menu.models import Menu # noqa: TCH001 if TYPE_CHECKING: @@ -29,9 +29,6 @@ async def get_menu( ) -> Menu: log.info("menu_request", branch=branch.name) - menu_items = await registry.manager.query( - db=db, schema=CoreMenuItem, branch=branch - ) # , prefetch_relationships=True) - menu = await generate_menu(db=db, branch=branch, account=account_session, menu_items=menu_items) - + menu_items = await registry.manager.query(db=db, schema=CoreMenuItem, branch=branch) + menu = await generate_restricted_menu(db=db, branch=branch, account=account_session, menu_items=menu_items) return menu.to_rest() diff --git a/backend/infrahub/api/schema.py b/backend/infrahub/api/schema.py index a577264282..32fbdaa83f 100644 --- a/backend/infrahub/api/schema.py +++ b/backend/infrahub/api/schema.py @@ -247,10 +247,8 @@ async def load_schema( for permission_backend in registry.permission_backends: if not await permission_backend.has_permission( db=db, - account_id=account_session.account_id, + account_session=account_session, permission=GlobalPermission( - id="", - name="", action=GlobalPermissions.MANAGE_SCHEMA.value, decision=( PermissionDecision.ALLOW_DEFAULT @@ -333,7 +331,9 @@ async def load_schema( migrations=result.migrations, ) migration_error_msgs = await service.workflow.execute_workflow( - workflow=SCHEMA_APPLY_MIGRATION, expected_return=list[str], parameters={"message": apply_migration_data} + workflow=SCHEMA_APPLY_MIGRATION, + expected_return=list[str], + parameters={"message": apply_migration_data}, ) if migration_error_msgs: @@ -381,7 +381,9 @@ async def check_schema( constraints=result.constraints, ) error_messages = await 
service.workflow.execute_workflow( - workflow=SCHEMA_VALIDATE_MIGRATION, expected_return=list[str], parameters={"message": validate_migration_data} + workflow=SCHEMA_VALIDATE_MIGRATION, + expected_return=list[str], + parameters={"message": validate_migration_data}, ) if error_messages: raise SchemaNotValidError(message=",\n".join(error_messages)) diff --git a/backend/infrahub/cli/db.py b/backend/infrahub/cli/db.py index d25554c412..4ea5ff46e3 100644 --- a/backend/infrahub/cli/db.py +++ b/backend/infrahub/cli/db.py @@ -22,6 +22,7 @@ from infrahub.core.graph.index import node_indexes, rel_indexes from infrahub.core.graph.schema import GRAPH_SCHEMA from infrahub.core.initialization import ( + create_anonymous_role, create_default_menu, create_default_roles, create_super_administrator_role, @@ -304,7 +305,9 @@ async def update_core_schema( # pylint: disable=too-many-statements migrations=result.migrations, ) migration_error_msgs = await service.workflow.execute_workflow( - workflow=SCHEMA_APPLY_MIGRATION, expected_return=list[str], parameters={"message": apply_migration_data} + workflow=SCHEMA_APPLY_MIGRATION, + expected_return=list[str], + parameters={"message": apply_migration_data}, ) if migration_error_msgs: @@ -429,3 +432,6 @@ async def setup_permissions(db: InfrahubDatabase) -> None: await create_super_administrators_group(db=db, role=administrator_role, admin_accounts=existing_accounts) await create_default_roles(db=db) + + if config.SETTINGS.main.allow_anonymous_access: + await create_anonymous_role(db=db) diff --git a/backend/infrahub/config.py b/backend/infrahub/config.py index 7d86f89b7b..163e7364d0 100644 --- a/backend/infrahub/config.py +++ b/backend/infrahub/config.py @@ -119,11 +119,11 @@ class MainSettings(BaseSettings): allow_anonymous_access: bool = Field( default=True, description="Indicates if the system allows anonymous read access" ) + anonymous_access_role: str = Field( + default="Anonymous User", description="Name of the role defining which 
permissions anonymous users have" + ) telemetry_optout: bool = Field(default=False, description="Disable anonymous usage reporting") telemetry_endpoint: str = "https://telemetry.opsmill.cloud/infrahub" - telemetry_interval: int = Field( - default=3600 * 24, ge=60, description="Time (in seconds) between telemetry usage push" - ) permission_backends: list[str] = Field( default=["infrahub.permissions.LocalPermissionBackend"], description="List of modules to handle permissions, they will be run in the given order", @@ -326,7 +326,10 @@ class GitSettings(BaseSettings): model_config = SettingsConfigDict(env_prefix="INFRAHUB_GIT_") repositories_directory: str = "repositories" sync_interval: int = Field( - default=10, ge=0, description="Time (in seconds) between git repositories synchronizations" + default=10, + ge=0, + description="Time (in seconds) between git repositories synchronizations", + deprecated="This setting is deprecated and not currently in use.", ) @@ -448,6 +451,14 @@ class SecurityOIDCProvider2(SecurityOIDCSettings): model_config = SettingsConfigDict(env_prefix="INFRAHUB_OIDC_PROVIDER2_") +class SecurityOIDCProviderSettings(BaseModel): + """This class is meant to facilitate configuration of OIDC providers when loading configuration from a infrahub.toml file.""" + + google: Optional[SecurityOIDCGoogle] = Field(default=None) + provider1: Optional[SecurityOIDCProvider1] = Field(default=None) + provider2: Optional[SecurityOIDCProvider2] = Field(default=None) + + class SecurityOAuth2BaseSettings(BaseSettings): """Baseclass for typing""" @@ -487,6 +498,14 @@ class SecurityOAuth2Google(SecurityOAuth2Settings): display_label: str = Field(default="Google") +class SecurityOAuth2ProviderSettings(BaseModel): + """This class is meant to facilitate configuration of OAuth2 providers when loading configuration from a infrahub.toml file.""" + + google: Optional[SecurityOAuth2Google] = Field(default=None) + provider1: Optional[SecurityOAuth2Provider1] = Field(default=None) 
+ provider2: Optional[SecurityOAuth2Provider2] = Field(default=None) + + class MiscellaneousSettings(BaseSettings): model_config = SettingsConfigDict(env_prefix="INFRAHUB_MISC_") print_query_details: bool = False @@ -532,7 +551,9 @@ class SecuritySettings(BaseSettings): default_factory=generate_uuid, description="The secret key used to validate authentication tokens" ) oauth2_providers: list[Oauth2Provider] = Field(default_factory=list, description="The selected OAuth2 providers") + oauth2_provider_settings: SecurityOAuth2ProviderSettings = Field(default_factory=SecurityOAuth2ProviderSettings) oidc_providers: list[OIDCProvider] = Field(default_factory=list, description="The selected OIDC providers") + oidc_provider_settings: SecurityOIDCProviderSettings = Field(default_factory=SecurityOIDCProviderSettings) _oauth2_settings: dict[str, SecurityOAuth2Settings] = PrivateAttr(default_factory=dict) _oidc_settings: dict[str, SecurityOIDCSettings] = PrivateAttr(default_factory=dict) @@ -544,9 +565,21 @@ def check_oauth2_provider_settings(self) -> Self: Oauth2Provider.GOOGLE: SecurityOAuth2Google, } for oauth2_provider in self.oauth2_providers: - provider = mapped_providers[oauth2_provider]() - if isinstance(provider, SecurityOAuth2Settings): - self._oauth2_settings[oauth2_provider.value] = provider + match oauth2_provider: + case Oauth2Provider.GOOGLE: + if self.oauth2_provider_settings.google: + self._oauth2_settings[oauth2_provider.value] = self.oauth2_provider_settings.google + case Oauth2Provider.PROVIDER1: + if self.oauth2_provider_settings.provider1: + self._oauth2_settings[oauth2_provider.value] = self.oauth2_provider_settings.provider1 + case Oauth2Provider.PROVIDER2: + if self.oauth2_provider_settings.provider2: + self._oauth2_settings[oauth2_provider.value] = self.oauth2_provider_settings.provider2 + + if oauth2_provider.value not in self._oauth2_settings: + provider = mapped_providers[oauth2_provider]() + if isinstance(provider, SecurityOAuth2Settings): + 
self._oauth2_settings[oauth2_provider.value] = provider return self @@ -558,9 +591,21 @@ def check_oidc_provider_settings(self) -> Self: OIDCProvider.PROVIDER2: SecurityOIDCProvider2, } for oidc_provider in self.oidc_providers: - provider = mapped_providers[oidc_provider]() - if isinstance(provider, SecurityOIDCSettings): - self._oidc_settings[oidc_provider.value] = provider + match oidc_provider: + case OIDCProvider.GOOGLE: + if self.oidc_provider_settings.google: + self._oidc_settings[oidc_provider.value] = self.oidc_provider_settings.google + case OIDCProvider.PROVIDER1: + if self.oidc_provider_settings.provider1: + self._oidc_settings[oidc_provider.value] = self.oidc_provider_settings.provider1 + case OIDCProvider.PROVIDER2: + if self.oidc_provider_settings.provider2: + self._oidc_settings[oidc_provider.value] = self.oidc_provider_settings.provider2 + + if oidc_provider.value not in self._oidc_settings: + provider = mapped_providers[oidc_provider]() + if isinstance(provider, SecurityOIDCSettings): + self._oidc_settings[oidc_provider.value] = provider return self @@ -632,7 +677,7 @@ def initialize(self, config_file: Optional[str] = None) -> None: if not config_file: config_file_name = os.environ.get("INFRAHUB_CONFIG", "infrahub.toml") config_file = os.path.abspath(config_file_name) - load(config_file) + self.settings = load(config_file) def initialize_and_exit(self, config_file: Optional[str] = None) -> None: """Initialize the settings if they have not been initialized, exit on failures.""" @@ -744,7 +789,7 @@ class Settings(BaseSettings): experimental_features: ExperimentalFeaturesSettings = ExperimentalFeaturesSettings() -def load(config_file_name: str = "infrahub.toml", config_data: Optional[dict[str, Any]] = None) -> None: +def load(config_file_name: str = "infrahub.toml", config_data: Optional[dict[str, Any]] = None) -> Settings: """Load configuration. 
Configuration is loaded from a config file in toml format that contains the settings, @@ -752,17 +797,15 @@ def load(config_file_name: str = "infrahub.toml", config_data: Optional[dict[str """ if config_data: - SETTINGS.settings = Settings(**config_data) - return + return Settings(**config_data) if os.path.exists(config_file_name): config_string = Path(config_file_name).read_text(encoding="utf-8") config_tmp = toml.loads(config_string) SETTINGS.settings = Settings(**config_tmp) - return - SETTINGS.settings = Settings() + return Settings() def load_and_exit(config_file_name: str = "infrahub.toml", config_data: Optional[dict[str, Any]] = None) -> None: @@ -776,7 +819,7 @@ def load_and_exit(config_file_name: str = "infrahub.toml", config_data: Optional config_data (dict, optional): [description]. Defaults to None. """ try: - load(config_file_name=config_file_name, config_data=config_data) + SETTINGS.settings = load(config_file_name=config_file_name, config_data=config_data) except ValidationError as err: print(f"Configuration not valid, found {len(err.errors())} error(s)") for error in err.errors(): diff --git a/backend/infrahub/core/account.py b/backend/infrahub/core/account.py index 31cbb1cc93..04226bf6bf 100644 --- a/backend/infrahub/core/account.py +++ b/backend/infrahub/core/account.py @@ -3,6 +3,8 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Optional, Union +from typing_extensions import Self + from infrahub.core.constants import InfrahubKind, PermissionDecision from infrahub.core.query import Query from infrahub.core.registry import registry @@ -17,23 +19,41 @@ @dataclass -class Permission: - id: str - name: str +class GlobalPermission: action: str decision: int + description: str = "" + id: str = "" - -@dataclass -class GlobalPermission(Permission): def __str__(self) -> str: decision = PermissionDecision(self.decision) return f"global:{self.action}:{decision.name.lower()}" + @classmethod + def from_string(cls, input: str) -> 
Self: + parts = input.split(":") + if len(parts) != 3 and parts[0] != "global": + raise ValueError(f"{input} is not a valid format for a Global permission") + + # FIXME there is probably a better way to convert the decision + decision = PermissionDecision.DENY + if parts[2] == "allow_default": + decision = PermissionDecision.ALLOW_DEFAULT + elif parts[2] == "allow_all": + decision = PermissionDecision.ALLOW_ALL + elif parts[2] == "allow_other": + decision = PermissionDecision.ALLOW_OTHER + return cls(action=str(parts[1]), decision=decision) + @dataclass -class ObjectPermission(Permission): +class ObjectPermission: namespace: str + name: str + action: str + decision: int + description: str = "" + id: str = "" def __str__(self) -> str: decision = PermissionDecision(self.decision) @@ -106,17 +126,6 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: } WITH global_permission - CALL { - WITH global_permission - MATCH (global_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(global_permission_name:AttributeValue) - WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) - RETURN global_permission_name, (r1.status = "active" AND r2.status = "active") AS is_active - ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC - LIMIT 1 - } - WITH global_permission, global_permission_name, is_active AS gpn_is_active - WHERE gpn_is_active = TRUE - CALL { WITH global_permission MATCH (global_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "action"})-[r2:HAS_VALUE]->(global_permission_action:AttributeValue) @@ -125,7 +134,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC LIMIT 1 } - WITH global_permission, global_permission_name, global_permission_action, is_active AS gpa_is_active + WITH global_permission, global_permission_action, is_active AS gpa_is_active WHERE gpa_is_active = TRUE CALL { @@ -136,7 
+145,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC LIMIT 1 } - WITH global_permission, global_permission_name, global_permission_action, global_permission_decision, is_active AS gpd_is_active + WITH global_permission, global_permission_action, global_permission_decision, is_active AS gpd_is_active WHERE gpd_is_active = TRUE """ % { "branch_filter": branch_filter, @@ -148,12 +157,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: self.add_to_query(query) - self.return_labels = [ - "global_permission", - "global_permission_name", - "global_permission_action", - "global_permission_decision", - ] + self.return_labels = ["global_permission", "global_permission_action", "global_permission_decision"] def get_permissions(self) -> list[GlobalPermission]: permissions: list[GlobalPermission] = [] @@ -162,7 +166,6 @@ def get_permissions(self) -> list[GlobalPermission]: permissions.append( GlobalPermission( id=result.get("global_permission").get("uuid"), - name=result.get("global_permission_name").get("value"), action=result.get("global_permission_action").get("value"), decision=result.get("global_permission_decision").get("value"), ) @@ -322,6 +325,221 @@ async def fetch_permissions(account_id: str, db: InfrahubDatabase, branch: Branc return {"global_permissions": global_permissions, "object_permissions": object_permissions} +class AccountRoleGlobalPermissionQuery(Query): + name: str = "account_role_global_permissions" + + def __init__(self, role_id: str, **kwargs: Any): + self.role_id = role_id + super().__init__(**kwargs) + + async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: + self.params["role_id"] = self.role_id + + branch_filter, branch_params = self.branch.get_query_filter_path( + at=self.at.to_string(), branch_agnostic=self.branch_agnostic + ) + self.params.update(branch_params) + + # ruff: noqa: E501 + query = 
""" + MATCH (account_role:%(account_role_node)s) + WHERE account_role.uuid = $role_id + CALL { + WITH account_role + MATCH (account_role)-[r:IS_PART_OF]-(root:Root) + WHERE %(branch_filter)s + RETURN account_role as account_role1, r as r1 + ORDER BY r.branch_level DESC, r.from DESC + LIMIT 1 + } + WITH account_role, r1 as r + WHERE r.status = "active" + WITH account_role + + CALL { + WITH account_role + MATCH (account_role)-[r1:IS_RELATED]->(:Relationship {name: "role__permissions"})<-[r2:IS_RELATED]-(global_permission:%(global_permission_node)s) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + WITH global_permission, r1, r2, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY global_permission.uuid, r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + WITH global_permission, head(collect(is_active)) as latest_is_active + WHERE latest_is_active = TRUE + RETURN global_permission + } + WITH global_permission + + CALL { + WITH global_permission + MATCH (global_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "action"})-[r2:HAS_VALUE]->(global_permission_action:AttributeValue) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + RETURN global_permission_action, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + LIMIT 1 + } + WITH global_permission, global_permission_action, is_active AS gpa_is_active + WHERE gpa_is_active = TRUE + + CALL { + WITH global_permission + MATCH (global_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "decision"})-[r2:HAS_VALUE]->(global_permission_decision:AttributeValue) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + RETURN global_permission_decision, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + LIMIT 1 + } + WITH global_permission, global_permission_action, global_permission_decision, 
is_active AS gpd_is_active + WHERE gpd_is_active = TRUE + """ % { + "branch_filter": branch_filter, + "account_role_node": InfrahubKind.ACCOUNTROLE, + "global_permission_node": InfrahubKind.GLOBALPERMISSION, + } + + self.add_to_query(query) + + self.return_labels = ["global_permission", "global_permission_action", "global_permission_decision"] + + def get_permissions(self) -> list[GlobalPermission]: + permissions: list[GlobalPermission] = [] + + for result in self.get_results(): + permissions.append( + GlobalPermission( + id=result.get("global_permission").get("uuid"), + action=result.get("global_permission_action").get("value"), + decision=result.get("global_permission_decision").get("value"), + ) + ) + + return permissions + + +class AccountRoleObjectPermissionQuery(Query): + name: str = "account_role_object_permissions" + + def __init__(self, role_id: str, **kwargs: Any): + self.role_id = role_id + super().__init__(**kwargs) + + async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: + self.params["role_id"] = self.role_id + + branch_filter, branch_params = self.branch.get_query_filter_path( + at=self.at.to_string(), branch_agnostic=self.branch_agnostic + ) + self.params.update(branch_params) + + query = """ + MATCH (account_role:%(account_role_node)s) + WHERE account_role.uuid = $role_id + CALL { + WITH account_role + MATCH (account_role)-[r:IS_PART_OF]-(root:Root) + WHERE %(branch_filter)s + RETURN account_role as account_role1, r as r1 + ORDER BY r.branch_level DESC, r.from DESC + LIMIT 1 + } + WITH account_role, r1 as r + WHERE r.status = "active" + WITH account_role + + CALL { + WITH account_role + MATCH (account_role)-[r1:IS_RELATED]->(:Relationship {name: "role__permissions"})<-[r2:IS_RELATED]-(object_permission:%(object_permission_node)s) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + WITH object_permission, r1, r2, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY object_permission.uuid, r2.branch_level 
DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + WITH object_permission, head(collect(is_active)) as latest_is_active + WHERE latest_is_active = TRUE + RETURN object_permission + } + WITH object_permission + + CALL { + WITH object_permission + MATCH (object_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "namespace"})-[r2:HAS_VALUE]->(object_permission_namespace:AttributeValue) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + RETURN object_permission_namespace, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + LIMIT 1 + } + WITH object_permission, object_permission_namespace, is_active AS opn_is_active + WHERE opn_is_active = TRUE + CALL { + WITH object_permission + MATCH (object_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(object_permission_name:AttributeValue) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + RETURN object_permission_name, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + LIMIT 1 + } + WITH object_permission, object_permission_namespace, object_permission_name, is_active AS opn_is_active + WHERE opn_is_active = TRUE + CALL { + WITH object_permission + MATCH (object_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "action"})-[r2:HAS_VALUE]->(object_permission_action:AttributeValue) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + RETURN object_permission_action, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + LIMIT 1 + } + WITH object_permission, object_permission_namespace, object_permission_name, object_permission_action, is_active AS opa_is_active + WHERE opa_is_active = TRUE + CALL { + WITH object_permission + MATCH (object_permission)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: 
"decision"})-[r2:HAS_VALUE]->(object_permission_decision:AttributeValue) + WHERE all(r IN [r1, r2] WHERE (%(branch_filter)s)) + RETURN object_permission_decision, (r1.status = "active" AND r2.status = "active") AS is_active + ORDER BY r2.branch_level DESC, r2.from DESC, r1.branch_level DESC, r1.from DESC + LIMIT 1 + } + WITH object_permission, object_permission_namespace, object_permission_name, object_permission_action, object_permission_decision, is_active AS opd_is_active + WHERE opd_is_active = TRUE + """ % { + "branch_filter": branch_filter, + "account_role_node": InfrahubKind.ACCOUNTROLE, + "object_permission_node": InfrahubKind.OBJECTPERMISSION, + } + + self.add_to_query(query) + + self.return_labels = [ + "object_permission", + "object_permission_namespace", + "object_permission_name", + "object_permission_action", + "object_permission_decision", + ] + + def get_permissions(self) -> list[ObjectPermission]: + permissions: list[ObjectPermission] = [] + for result in self.get_results(): + permissions.append( + ObjectPermission( + id=result.get("object_permission").get("uuid"), + namespace=result.get("object_permission_namespace").get("value"), + name=result.get("object_permission_name").get("value"), + action=result.get("object_permission_action").get("value"), + decision=result.get("object_permission_decision").get("value"), + ) + ) + + return permissions + + +async def fetch_role_permissions(role_id: str, db: InfrahubDatabase, branch: Branch) -> AssignedPermissions: + query1 = await AccountRoleGlobalPermissionQuery.init(db=db, branch=branch, role_id=role_id, branch_agnostic=True) + await query1.execute(db=db) + global_permissions = query1.get_permissions() + + query2 = await AccountRoleObjectPermissionQuery.init(db=db, branch=branch, role_id=role_id) + await query2.execute(db=db) + object_permissions = query2.get_permissions() + + return {"global_permissions": global_permissions, "object_permissions": object_permissions} + + class 
AccountTokenValidateQuery(Query): name: str = "account_token_validate" diff --git a/backend/infrahub/core/attribute.py b/backend/infrahub/core/attribute.py index 008ff1af82..655820cdf5 100644 --- a/backend/infrahub/core/attribute.py +++ b/backend/infrahub/core/attribute.py @@ -234,15 +234,21 @@ def validate_content(cls, value: Any, name: str, schema: AttributeSchema) -> Non ValidationError: Content of the attribute value is not valid """ if schema.regex: - try: - is_valid = re.match(pattern=schema.regex, string=str(value)) - except re.error as exc: - raise ValidationError( - {name: f"The regex defined in the schema is not valid ({schema.regex!r})"} - ) from exc + if schema.kind == "List": + validation_values = [str(entry) for entry in value] + else: + validation_values = [str(value)] - if not is_valid: - raise ValidationError({name: f"{value} must be conform with the regex: {schema.regex!r}"}) + for validation_value in validation_values: + try: + is_valid = re.match(pattern=schema.regex, string=str(validation_value)) + except re.error as exc: + raise ValidationError( + {name: f"The regex defined in the schema is not valid ({schema.regex!r})"} + ) from exc + + if not is_valid: + raise ValidationError({name: f"{validation_value} must conform with the regex: {schema.regex!r}"}) if schema.min_length: if len(value) < schema.min_length: @@ -390,12 +396,15 @@ async def _update(self, db: InfrahubDatabase, at: Optional[Timestamp] = None) -> self.validate(value=self.value, name=self.name, schema=self.schema) # Check if the current value is still the default one - if ( - self.is_default - and (self.schema.default_value is not None and self.schema.default_value != self.value) - or (self.schema.default_value is None and self.value is not None) - ): - self.is_default = False + if self.is_default: + if isinstance(self.value, Enum): + has_default_value = self.schema.default_value == self.value.value + else: + has_default_value = self.schema.default_value == self.value + if 
(self.schema.default_value is not None and not has_default_value) or ( + self.schema.default_value is None and self.value is not None + ): + self.is_default = False query = await NodeListGetAttributeQuery.init( db=db, @@ -506,9 +515,6 @@ async def to_graphql( ) continue - if field_name == "permissions": - response["permissions"] = {"view": "ALLOW", "update": "ALLOW"} - if field_name.startswith("_"): field = getattr(self, field_name[1:]) else: diff --git a/backend/infrahub/core/branch/__init__.py b/backend/infrahub/core/branch/__init__.py new file mode 100644 index 0000000000..558eee49fb --- /dev/null +++ b/backend/infrahub/core/branch/__init__.py @@ -0,0 +1,3 @@ +from .models import Branch + +__all__ = ["Branch"] diff --git a/backend/infrahub/core/branch.py b/backend/infrahub/core/branch/models.py similarity index 99% rename from backend/infrahub/core/branch.py rename to backend/infrahub/core/branch/models.py index 47eaf43f9c..7eb55320ec 100644 --- a/backend/infrahub/core/branch.py +++ b/backend/infrahub/core/branch/models.py @@ -119,13 +119,13 @@ def has_schema_changes(self) -> bool: return False - def update_schema_hash(self, at: Optional[Union[Timestamp, str]] = None) -> bool: + def update_schema_hash(self, at: Timestamp | str | None = None) -> bool: latest_schema = registry.schema.get_schema_branch(name=self.name) - self.schema_changed_at = Timestamp(at).to_string() new_hash = latest_schema.get_hash_full() if self.schema_hash and new_hash.main == self.schema_hash.main: return False + self.schema_changed_at = Timestamp(at).to_string() self.schema_hash = new_hash return True diff --git a/backend/infrahub/core/branch/tasks.py b/backend/infrahub/core/branch/tasks.py new file mode 100644 index 0000000000..cc89305f96 --- /dev/null +++ b/backend/infrahub/core/branch/tasks.py @@ -0,0 +1,201 @@ +from __future__ import annotations + +from prefect import flow, get_run_logger + +from infrahub import lock +from infrahub.core import registry +from infrahub.core.branch import 
Branch +from infrahub.core.diff.coordinator import DiffCoordinator +from infrahub.core.diff.ipam_diff_parser import IpamDiffParser +from infrahub.core.diff.merger.merger import DiffMerger +from infrahub.core.diff.repository.repository import DiffRepository +from infrahub.core.merge import BranchMerger +from infrahub.core.migrations.schema.models import SchemaApplyMigrationData +from infrahub.core.migrations.schema.tasks import schema_apply_migrations +from infrahub.core.validators.determiner import ConstraintValidatorDeterminer +from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData +from infrahub.core.validators.tasks import schema_validate_migrations +from infrahub.dependencies.registry import get_component_registry +from infrahub.exceptions import ValidationError +from infrahub.log import get_log_data +from infrahub.message_bus import Meta, messages +from infrahub.services import services +from infrahub.worker import WORKER_IDENTITY +from infrahub.workflows.catalogue import IPAM_RECONCILIATION +from infrahub.workflows.utils import add_branch_tag + + +@flow(name="branch-rebase") +async def rebase_branch(branch: str) -> None: + service = services.service + log = get_run_logger() + await add_branch_tag(branch_name=branch) + + obj = await Branch.get_by_name(db=service.database, name=branch) + base_branch = await Branch.get_by_name(db=service.database, name=registry.default_branch) + component_registry = get_component_registry() + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=service.database, branch=obj) + diff_merger = await component_registry.get_component(DiffMerger, db=service.database, branch=obj) + merger = BranchMerger( + db=service.database, + diff_coordinator=diff_coordinator, + diff_merger=diff_merger, + source_branch=obj, + service=service, + ) + diff_repository = await component_registry.get_component(DiffRepository, db=service.database, branch=obj) + enriched_diff = await 
diff_coordinator.update_branch_diff(base_branch=base_branch, diff_branch=obj) + if enriched_diff.get_all_conflicts(): + raise ValidationError( + f"Branch {obj.name} contains conflicts with the default branch that must be addressed." + " Please review the diff for details and manually update the conflicts before rebasing." + ) + node_diff_field_summaries = await diff_repository.get_node_field_summaries( + diff_branch_name=enriched_diff.diff_branch_name, diff_id=enriched_diff.uuid + ) + + candidate_schema = merger.get_candidate_schema() + determiner = ConstraintValidatorDeterminer(schema_branch=candidate_schema) + constraints = await determiner.get_constraints(node_diffs=node_diff_field_summaries) + + # If there are some changes related to the schema between this branch and main, we need to + # - Run all the validations to ensure everything is correct before rebasing the branch + # - Run all the migrations after the rebase + if obj.has_schema_changes: + constraints += await merger.calculate_validations(target_schema=candidate_schema) + if constraints: + error_messages = await schema_validate_migrations( + message=SchemaValidateMigrationData(branch=obj, schema_branch=candidate_schema, constraints=constraints) + ) + if error_messages: + raise ValidationError(",\n".join(error_messages)) + + schema_in_main_before = merger.destination_schema.duplicate() + + async with lock.registry.global_graph_lock(): + async with service.database.start_transaction() as dbt: + await obj.rebase(db=dbt) + log.info("Branch successfully rebased") + + if obj.has_schema_changes: + # NOTE there is a bit additional work in order to calculate a proper diff that will + # allow us to pull only the part of the schema that has changed, for now the safest option is to pull + # Everything + # schema_diff = await merger.has_schema_changes() + # TODO Would be good to convert this part to a Prefect Task in order to track it properly + updated_schema = await registry.schema.load_schema_from_db( + 
db=service.database, + branch=obj, + # schema=merger.source_schema.duplicate(), + # schema_diff=schema_diff, + ) + registry.schema.set_schema_branch(name=obj.name, schema=updated_schema) + obj.update_schema_hash() + await obj.save(db=service.database) + + # Execute the migrations + migrations = await merger.calculate_migrations(target_schema=updated_schema) + + errors = await schema_apply_migrations( + message=SchemaApplyMigrationData( + branch=merger.source_branch, + new_schema=candidate_schema, + previous_schema=schema_in_main_before, + migrations=migrations, + ) + ) + for error in errors: + log.error(error) + + # ------------------------------------------------------------- + # Trigger the reconciliation of IPAM data after the rebase + # ------------------------------------------------------------- + differ = await merger.get_graph_diff() + diff_parser = IpamDiffParser( + db=service.database, + differ=differ, + source_branch_name=obj.name, + target_branch_name=registry.default_branch, + ) + ipam_node_details = await diff_parser.get_changed_ipam_node_details() + await service.workflow.submit_workflow( + workflow=IPAM_RECONCILIATION, parameters={"branch": obj.name, "ipam_node_details": ipam_node_details} + ) + + # ------------------------------------------------------------- + # Generate an event to indicate that a branch has been rebased + # NOTE: we still need to convert this event and potentially pull + # some tasks currently executed based on the event into this workflow + # ------------------------------------------------------------- + log_data = get_log_data() + request_id = log_data.get("request_id", "") + message = messages.EventBranchRebased( + branch=obj.name, + meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), + ) + await service.send(message=message) + + +@flow(name="branch-merge") +async def merge_branch(branch: str, conflict_resolution: dict[str, bool] | None = None) -> None: + service = services.service + log = get_run_logger() + + 
await add_branch_tag(branch_name=branch) + await add_branch_tag(branch_name=registry.default_branch) + + obj = await Branch.get_by_name(db=service.database, name=branch) + component_registry = get_component_registry() + + merger: BranchMerger | None = None + async with lock.registry.global_graph_lock(): + async with service.database.start_transaction() as db: + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=obj) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=obj) + merger = BranchMerger( + db=db, diff_coordinator=diff_coordinator, diff_merger=diff_merger, source_branch=obj, service=service + ) + await merger.merge(conflict_resolution=conflict_resolution) + await merger.update_schema() + + if merger and merger.migrations: + errors = await schema_apply_migrations( + message=SchemaApplyMigrationData( + branch=merger.destination_branch, + new_schema=merger.destination_schema, + previous_schema=merger.initial_source_schema, + migrations=merger.migrations, + ) + ) + for error in errors: + log.error(error) + + # ------------------------------------------------------------- + # Trigger the reconciliation of IPAM data after the merge + # ------------------------------------------------------------- + differ = await merger.get_graph_diff() + diff_parser = IpamDiffParser( + db=service.database, + differ=differ, + source_branch_name=obj.name, + target_branch_name=registry.default_branch, + ) + ipam_node_details = await diff_parser.get_changed_ipam_node_details() + await service.workflow.submit_workflow( + workflow=IPAM_RECONCILIATION, + parameters={"branch": registry.default_branch, "ipam_node_details": ipam_node_details}, + ) + + # ------------------------------------------------------------- + # Generate an event to indicate that a branch has been merged + # NOTE: we still need to convert this event and potentially pull + # some tasks currently executed based on the event into this workflow + # 
------------------------------------------------------------- + log_data = get_log_data() + request_id = log_data.get("request_id", "") + message = messages.EventBranchMerge( + source_branch=obj.name, + target_branch=registry.default_branch, + meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), + ) + await service.send(message=message) diff --git a/backend/infrahub/core/diff/branch_differ.py b/backend/infrahub/core/diff/branch_differ.py index 240823b261..670423ed00 100644 --- a/backend/infrahub/core/diff/branch_differ.py +++ b/backend/infrahub/core/diff/branch_differ.py @@ -747,7 +747,8 @@ async def _calculated_diff_rels(self) -> None: dst_node_id = result.get("dn").get("uuid") from_time = Timestamp(result.get("r1").get("from")) - # to_time = result.get("r1").get("to", None) + to_time_raw = result.get("r1").get("to", None) + to_time = Timestamp(to_time_raw) if to_time_raw else None item = { "branch": branch_name, @@ -779,14 +780,21 @@ async def _calculated_diff_rels(self) -> None: item["paths"] = relationship_paths.paths item["conflict_paths"] = relationship_paths.conflict_paths - # FIXME Need to revisit changed_at, mostlikely not accurate. 
More of a placeholder at this point - if branch_status == RelationshipStatus.ACTIVE.value: - item["action"] = DiffAction.ADDED - item["changed_at"] = from_time - elif branch_status == RelationshipStatus.DELETED.value: + if branch_status == RelationshipStatus.DELETED.value: item["action"] = DiffAction.REMOVED item["changed_at"] = from_time rel_ids_to_query.append(rel_id) + elif ( + branch_status == RelationshipStatus.ACTIVE.value + and to_time + and from_time < self.diff_from <= to_time <= self.diff_to + ): + item["action"] = DiffAction.REMOVED + item["changed_at"] = to_time + rel_ids_to_query.append(rel_id) + elif branch_status == RelationshipStatus.ACTIVE.value: + item["action"] = DiffAction.ADDED + item["changed_at"] = from_time else: raise ValueError(f"Unexpected value for branch_status: {branch_status}") @@ -804,7 +812,6 @@ async def _calculated_diff_rels(self) -> None: for result in query_props.get_results(): branch_name = result.get("r3").get("branch") - branch_status = result.get("r3").get("status") rel_name = result.get("rel").get("name") rel_id = result.get("rel").get("uuid") @@ -871,6 +878,8 @@ async def _calculated_diff_rels(self) -> None: prop_type = result.get_rel("r3").type prop_from = Timestamp(result.get("r3").get("from")) + prop_to_raw = result.get("r3").get("to", None) + prop_to = Timestamp(prop_to_raw) if prop_to_raw else None origin_prop = origin_rel_properties_query.get_results_by_id_and_prop_type( rel_id=rel_id, prop_type=prop_type @@ -892,6 +901,9 @@ async def _calculated_diff_rels(self) -> None: if not origin_prop and prop_from >= self.diff_from and branch_status == RelationshipStatus.ACTIVE.value: prop["action"] = DiffAction.ADDED prop["changed_at"] = prop_from + elif prop_to and prop_to >= self.diff_from and branch_status == RelationshipStatus.ACTIVE.value: + prop["action"] = DiffAction.REMOVED + prop["changed_at"] = prop_to elif prop_from >= self.diff_from and branch_status == RelationshipStatus.DELETED.value: prop["action"] = 
DiffAction.REMOVED prop["changed_at"] = prop_from diff --git a/backend/infrahub/core/diff/data_check_synchronizer.py b/backend/infrahub/core/diff/data_check_synchronizer.py index 492919db75..60975dd6a7 100644 --- a/backend/infrahub/core/diff/data_check_synchronizer.py +++ b/backend/infrahub/core/diff/data_check_synchronizer.py @@ -5,6 +5,7 @@ from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.database import InfrahubDatabase +from infrahub.exceptions import SchemaNotFoundError from .conflicts_extractor import DiffConflictsExtractor from .model.path import ConflictSelection, EnrichedDiffConflict, EnrichedDiffRoot @@ -22,14 +23,18 @@ def __init__( self.conflict_recorder = conflict_recorder async def synchronize(self, enriched_diff: EnrichedDiffRoot) -> list[Node]: - proposed_changes = await NodeManager.query( - db=self.db, - schema=InfrahubKind.PROPOSEDCHANGE, - filters={"source_branch": enriched_diff.diff_branch_name, "state": ProposedChangeState.OPEN}, - ) + try: + proposed_changes = await NodeManager.query( + db=self.db, + schema=InfrahubKind.PROPOSEDCHANGE, + filters={"source_branch": enriched_diff.diff_branch_name, "state": ProposedChangeState.OPEN}, + ) + except SchemaNotFoundError: + # if the CoreProposedChange schema does not exist, then there's nothing to do + proposed_changes = [] if not proposed_changes: return [] - enriched_conflicts = enriched_diff.get_all_conflicts() + enriched_conflicts_map = enriched_diff.get_all_conflicts() data_conflicts = await self.conflicts_extractor.get_data_conflicts(enriched_diff_root=enriched_diff) all_data_checks = [] for pc in proposed_changes: @@ -38,7 +43,7 @@ async def synchronize(self, enriched_diff: EnrichedDiffRoot) -> list[Node]: ) all_data_checks.extend(core_data_checks) core_data_checks_by_id = {cdc.enriched_conflict_id.value: cdc for cdc in core_data_checks} # type: ignore[attr-defined] - enriched_conflicts_by_id = {ec.uuid: ec for ec in enriched_conflicts} + 
enriched_conflicts_by_id = {ec.uuid: ec for ec in enriched_conflicts_map.values()} for conflict_id, core_data_check in core_data_checks_by_id.items(): enriched_conflict = enriched_conflicts_by_id.get(conflict_id) if not enriched_conflict: diff --git a/backend/infrahub/core/diff/merger/serializer.py b/backend/infrahub/core/diff/merger/serializer.py index 6067b862f0..e199010d7f 100644 --- a/backend/infrahub/core/diff/merger/serializer.py +++ b/backend/infrahub/core/diff/merger/serializer.py @@ -267,19 +267,26 @@ def _get_default_property_merge_dicts(self, action: DiffAction) -> dict[Database ), } - def _get_added_removed_peers( - self, relationship_diff: EnrichedDiffSingleRelationship - ) -> tuple[str | None, str | None]: + def _get_actions_and_peers(self, relationship_diff: EnrichedDiffSingleRelationship) -> list[tuple[DiffAction, str]]: is_related_prop = [p for p in relationship_diff.properties if p.property_type is DatabaseEdgeType.IS_RELATED][0] actions_and_values = self._get_property_actions_and_values(property_diff=is_related_prop, python_value_type=str) - added = None - removed = None + actions_and_peers: list[tuple[DiffAction, str]] = [] for action, peer_id in actions_and_values: if action is DiffAction.ADDED: - added = str(peer_id) + actions_and_peers.append((DiffAction.ADDED, str(peer_id))) elif action is DiffAction.REMOVED: - removed = str(peer_id) - return added, removed + actions_and_peers.append((DiffAction.REMOVED, str(peer_id))) + + conflict = relationship_diff.conflict + if ( + conflict + and conflict.selected_branch + and conflict.selected_branch is ConflictSelection.DIFF_BRANCH + and conflict.base_branch_value + and conflict.base_branch_action in (DiffAction.ADDED, DiffAction.UPDATED) + ): + actions_and_peers.append((DiffAction.REMOVED, conflict.base_branch_value)) + return actions_and_peers def _serialize_relationship_element( self, relationship_diff: EnrichedDiffSingleRelationship, relationship_identifier: str, node_uuid: str @@ -291,10 +298,12 
@@ def _serialize_relationship_element( relationship_dicts = [] added_property_dicts = self._get_default_property_merge_dicts(action=DiffAction.ADDED) removed_property_dicts = self._get_default_property_merge_dicts(action=DiffAction.REMOVED) - other_property_dicts = {} - added_peer_id, removed_peer_id = self._get_added_removed_peers(relationship_diff=relationship_diff) + other_property_dicts: dict[DatabaseEdgeType, PropertyMergeDict] = {} + actions_and_peers = self._get_actions_and_peers(relationship_diff=relationship_diff) + added_peer_ids = [peer_id for action, peer_id in actions_and_peers if action is DiffAction.ADDED] + removed_peer_ids = [peer_id for action, peer_id in actions_and_peers if action is DiffAction.REMOVED] - for action, peer_id in ((DiffAction.ADDED, added_peer_id), (DiffAction.REMOVED, removed_peer_id)): + for action, peer_id in actions_and_peers: if ( peer_id and (peer_id, relationship_identifier, node_uuid) not in self._conflicted_cardinality_one_relationships @@ -320,18 +329,17 @@ def _serialize_relationship_element( action=self._to_action_str(action=action), value=value, ) - if added_peer_id and action is DiffAction.ADDED: + if added_peer_ids and action is DiffAction.ADDED: added_property_dicts[property_diff.property_type] = property_dict - elif removed_peer_id and action is DiffAction.REMOVED: + elif removed_peer_ids and action is DiffAction.REMOVED: removed_property_dicts[property_diff.property_type] = property_dict else: other_property_dicts[property_diff.property_type] = property_dict relationship_property_dicts = [] - for peer_id, property_dicts in ( - (added_peer_id, added_property_dicts), - (removed_peer_id, removed_property_dicts), - (relationship_diff.peer_id, other_property_dicts), - ): + peers_and_property_dics = [(peer_id, added_property_dicts) for peer_id in added_peer_ids] + peers_and_property_dics += [(peer_id, removed_property_dicts) for peer_id in removed_peer_ids] + peers_and_property_dics += [(relationship_diff.peer_id, 
other_property_dicts)] + for peer_id, property_dicts in peers_and_property_dics: if ( peer_id and property_dicts diff --git a/backend/infrahub/core/diff/model/path.py b/backend/infrahub/core/diff/model/path.py index 3a21629fdc..b1fc156015 100644 --- a/backend/infrahub/core/diff/model/path.py +++ b/backend/infrahub/core/diff/model/path.py @@ -89,6 +89,13 @@ def __hash__(self) -> int: return hash(f"{self.node_uuid}:{self.field_name}") +@dataclass +class NodeDiffFieldSummary: + kind: str + attribute_names: set[str] = field(default_factory=set) + relationship_names: set[str] = field(default_factory=set) + + @dataclass class BaseSummary: num_added: int = field(default=0, kw_only=True) @@ -163,8 +170,8 @@ class EnrichedDiffAttribute(BaseSummary): def __hash__(self) -> int: return hash(self.name) - def get_all_conflicts(self) -> list[EnrichedDiffConflict]: - return [prop.conflict for prop in self.properties if prop.conflict] + def get_all_conflicts(self) -> dict[str, EnrichedDiffConflict]: + return {prop.path_identifier: prop.conflict for prop in self.properties if prop.conflict} @classmethod def from_calculated_attribute(cls, calculated_attribute: DiffAttribute) -> EnrichedDiffAttribute: @@ -192,11 +199,11 @@ class EnrichedDiffSingleRelationship(BaseSummary): def __hash__(self) -> int: return hash(self.peer_id) - def get_all_conflicts(self) -> list[EnrichedDiffConflict]: - all_conflicts = [] + def get_all_conflicts(self) -> dict[str, EnrichedDiffConflict]: + all_conflicts: dict[str, EnrichedDiffConflict] = {} if self.conflict: - all_conflicts.append(self.conflict) - all_conflicts.extend([prop.conflict for prop in self.properties if prop.conflict]) + all_conflicts[self.path_identifier] = self.conflict + all_conflicts.update({prop.path_identifier: prop.conflict for prop in self.properties if prop.conflict}) return all_conflicts def get_property(self, property_type: DatabaseEdgeType) -> EnrichedDiffProperty: @@ -232,10 +239,10 @@ class EnrichedDiffRelationship(BaseSummary): 
def __hash__(self) -> int: return hash(self.name) - def get_all_conflicts(self) -> list[EnrichedDiffConflict]: - all_conflicts = [] + def get_all_conflicts(self) -> dict[str, EnrichedDiffConflict]: + all_conflicts: dict[str, EnrichedDiffConflict] = {} for element in self.relationships: - all_conflicts.extend(element.get_all_conflicts()) + all_conflicts.update(element.get_all_conflicts()) return all_conflicts @property @@ -281,14 +288,14 @@ class EnrichedDiffNode(BaseSummary): def __hash__(self) -> int: return hash(self.uuid) - def get_all_conflicts(self) -> list[EnrichedDiffConflict]: - all_conflicts = [] + def get_all_conflicts(self) -> dict[str, EnrichedDiffConflict]: + all_conflicts: dict[str, EnrichedDiffConflict] = {} if self.conflict: - all_conflicts.append(self.conflict) + all_conflicts[self.path_identifier] = self.conflict for attribute in self.attributes: - all_conflicts.extend(attribute.get_all_conflicts()) + all_conflicts.update(attribute.get_all_conflicts()) for relationship in self.relationships: - all_conflicts.extend(relationship.get_all_conflicts()) + all_conflicts.update(relationship.get_all_conflicts()) return all_conflicts def get_parent_info(self, context: GraphqlContext | None = None) -> ParentNodeInfo | None: @@ -407,10 +414,10 @@ def has_node(self, node_uuid: str) -> bool: except ValueError: return False - def get_all_conflicts(self) -> list[EnrichedDiffConflict]: - all_conflicts = [] + def get_all_conflicts(self) -> dict[str, EnrichedDiffConflict]: + all_conflicts: dict[str, EnrichedDiffConflict] = {} for node in self.nodes: - all_conflicts.extend(node.get_all_conflicts()) + all_conflicts.update(node.get_all_conflicts()) return all_conflicts @classmethod @@ -665,19 +672,19 @@ def property_id(self) -> str: return self.property_node.element_id @property - def property_changed_at(self) -> Timestamp: + def property_from_time(self) -> Timestamp: return Timestamp(self.path_to_property.get("from")) @property - def property_status(self) -> 
RelationshipStatus: - return RelationshipStatus(self.path_to_property.get("status")) + def property_to_time(self) -> Timestamp | None: + raw_to = self.path_to_property.get("to") + if not raw_to: + return None + return Timestamp(str(raw_to)) @property - def property_end_time(self) -> Optional[Timestamp]: - end_time_str = self.path_to_property.get("to") - if not end_time_str: - return None - return Timestamp(end_time_str) + def property_status(self) -> RelationshipStatus: + return RelationshipStatus(self.path_to_property.get("status")) @property def property_value(self) -> Any: diff --git a/backend/infrahub/message_bus/messages/request_diff_update.py b/backend/infrahub/core/diff/models.py similarity index 53% rename from backend/infrahub/message_bus/messages/request_diff_update.py rename to backend/infrahub/core/diff/models.py index 7310473f77..fa898455b1 100644 --- a/backend/infrahub/message_bus/messages/request_diff_update.py +++ b/backend/infrahub/core/diff/models.py @@ -1,9 +1,7 @@ -from pydantic import Field +from pydantic import BaseModel, Field -from infrahub.message_bus import InfrahubMessage - -class RequestDiffUpdate(InfrahubMessage): +class RequestDiffUpdate(BaseModel): """ Request diff to be updated. 
@@ -15,3 +13,10 @@ class RequestDiffUpdate(InfrahubMessage): name: str | None = None from_time: str | None = None to_time: str | None = None + + +class RequestDiffRefresh(BaseModel): + """Request diff be recalculated from scratch.""" + + branch_name: str = Field(..., description="The branch associated with the diff") + diff_id: str = Field(..., description="The id for this diff") diff --git a/backend/infrahub/core/diff/query/field_summary.py b/backend/infrahub/core/diff/query/field_summary.py new file mode 100644 index 0000000000..ec50974858 --- /dev/null +++ b/backend/infrahub/core/diff/query/field_summary.py @@ -0,0 +1,78 @@ +from typing import Any + +from infrahub.core.constants import DiffAction +from infrahub.core.query import Query, QueryType +from infrahub.database import InfrahubDatabase + +from ..model.path import NodeDiffFieldSummary, TrackingId + + +class EnrichedDiffNodeFieldSummaryQuery(Query): + """ + Get node kind and names of all altered attributes and relationships for each kind + """ + + name = "enriched_diff_node_field_summary" + type = QueryType.READ + + def __init__( + self, + diff_branch_name: str, + tracking_id: TrackingId | None = None, + diff_id: str | None = None, + **kwargs: Any, + ) -> None: + super().__init__(**kwargs) + self.diff_branch_name = diff_branch_name + self.tracking_id = tracking_id + self.diff_id = diff_id + + async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: + if self.tracking_id is None and self.diff_id is None: + raise RuntimeError("Either tacking_id or diff_id is required") + self.params = { + "unchanged_str": DiffAction.UNCHANGED.value, + "diff_branch": self.diff_branch_name, + "tracking_id": self.tracking_id.serialize() if self.tracking_id else None, + "diff_id": self.diff_id, + } + query = """ + MATCH (diff_root:DiffRoot) + WHERE diff_root.diff_branch = $diff_branch + AND (diff_root.tracking_id = $tracking_id OR $tracking_id IS NULL) + AND (diff_root.uuid = $diff_id OR $diff_id IS NULL) + 
OPTIONAL MATCH (diff_root)-[:DIFF_HAS_NODE]->(n:DiffNode) + WHERE n.action <> $unchanged_str + WITH DISTINCT n.kind AS kind + CALL { + WITH kind + OPTIONAL MATCH (n:DiffNode {kind: kind})-[:DIFF_HAS_ATTRIBUTE]->(a:DiffAttribute) + WHERE n.action <> $unchanged_str + AND a.action <> $unchanged_str + WITH DISTINCT a.name AS attr_name + RETURN collect(attr_name) AS attr_names + } + WITH kind, attr_names + CALL { + WITH kind + OPTIONAL MATCH (n:DiffNode {kind: kind})-[:DIFF_HAS_RELATIONSHIP]->(r:DiffRelationship) + WHERE n.action <> $unchanged_str + AND r.action <> $unchanged_str + WITH DISTINCT r.name AS rel_name + RETURN collect(rel_name) AS rel_names + } + """ + self.add_to_query(query=query) + self.return_labels = ["kind", "attr_names", "rel_names"] + + async def get_field_summaries(self) -> list[NodeDiffFieldSummary]: + field_summaries = [] + for result in self.get_results(): + kind = result.get_as_type(label="kind", return_type=str) + attr_names = result.get_as_type(label="attr_names", return_type=list[str]) + rel_names = result.get_as_type(label="rel_names", return_type=list[str]) + if attr_names or rel_names: + field_summaries.append( + NodeDiffFieldSummary(kind=kind, attribute_names=set(attr_names), relationship_names=set(rel_names)) + ) + return field_summaries diff --git a/backend/infrahub/core/diff/query/merge.py b/backend/infrahub/core/diff/query/merge.py index 38855a223d..465c321732 100644 --- a/backend/infrahub/core/diff/query/merge.py +++ b/backend/infrahub/core/diff/query/merge.py @@ -351,8 +351,12 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: // with different is_default values // ------------------------------ WITH attr_rel, property_diff - OPTIONAL MATCH (attr_rel)-[r_attr_val:HAS_VALUE]->(av:AttributeValue {value: property_diff.value}) + OPTIONAL MATCH (attr_rel)-[r_attr_val:HAS_VALUE]->(av:AttributeValue) WHERE property_diff.property_type = "HAS_VALUE" + AND ( + av.value = property_diff.value + OR 
toLower(toString(av.value)) = toLower(toString(property_diff.value)) + ) AND r_attr_val.branch IN [$source_branch, $target_branch] RETURN av ORDER BY r_attr_val.from DESC @@ -386,7 +390,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: WHERE type(r_prop) = prop_type AND r_prop.status = prop_rel_status AND r_prop.from <= $at - AND (r_prop.to >= $at OR r_prop.to IS NULL) + AND (r_prop.to > $at OR r_prop.to IS NULL) RETURN r_prop } WITH attr_rel, prop_rel_status, prop_type, prop_node, r_prop diff --git a/backend/infrahub/core/diff/query_parser.py b/backend/infrahub/core/diff/query_parser.py index a19a5fd35a..ed12f8152c 100644 --- a/backend/infrahub/core/diff/query_parser.py +++ b/backend/infrahub/core/diff/query_parser.py @@ -212,7 +212,10 @@ def from_properties( raise DiffNoPeerIdError(f"Cannot identify peer ID for relationship property {(properties[0]).db_id}") ordered_properties_by_type: dict[DatabaseEdgeType, list[DiffRelationshipPropertyIntermediate]] = {} - chronological_properties = sorted(properties, key=lambda p: p.changed_at) + # tiebreaker for simultaneous updates is to prefer the DELETED relationship + chronological_properties = sorted( + properties, key=lambda p: (p.changed_at, p.status is RelationshipStatus.ACTIVE) + ) last_changed_at = chronological_properties[-1].changed_at for chronological_property in chronological_properties: property_key = DatabaseEdgeType(chronological_property.property_type) @@ -307,7 +310,7 @@ class DiffRelationshipIntermediate: properties_by_db_id: dict[str, set[DiffRelationshipPropertyIntermediate]] = field(default_factory=dict) _single_relationship_list: list[DiffSingleRelationshipIntermediate] = field(default_factory=list) - def add_path(self, database_path: DatabasePath) -> None: + def add_path(self, database_path: DatabasePath, diff_from_time: Timestamp, diff_to_time: Timestamp) -> None: if database_path.property_type in [ DatabaseEdgeType.IS_RELATED, DatabaseEdgeType.HAS_OWNER, @@ -323,11 
+326,26 @@ def add_path(self, database_path: DatabasePath) -> None: DiffRelationshipPropertyIntermediate( db_id=db_id, property_type=database_path.property_type, - changed_at=database_path.property_changed_at, + changed_at=database_path.property_from_time, status=database_path.property_status, value=value, ) ) + to_time = database_path.property_to_time + if ( + to_time + and database_path.property_from_time < diff_from_time <= to_time <= diff_to_time + and database_path.property_status is RelationshipStatus.ACTIVE + ): + self.properties_by_db_id[db_id].add( + DiffRelationshipPropertyIntermediate( + db_id=db_id, + property_type=database_path.property_type, + changed_at=to_time, + status=RelationshipStatus.DELETED, + value=value, + ) + ) def _index_relationships(self) -> None: self._single_relationship_list = [ @@ -513,7 +531,9 @@ def _update_attribute_level(self, database_path: DatabasePath, diff_node: DiffNo if not relationship_schema: return diff_relationship = self._get_diff_relationship(diff_node=diff_node, relationship_schema=relationship_schema) - diff_relationship.add_path(database_path=database_path) + diff_relationship.add_path( + database_path=database_path, diff_from_time=self.from_time, diff_to_time=self.to_time + ) def _get_diff_attribute( self, database_path: DatabasePath, diff_node: DiffNodeIntermediate @@ -540,7 +560,7 @@ def _update_attribute_property( value = database_path.peer_id diff_property.add_value( diff_value=DiffValueIntermediate( - changed_at=database_path.property_changed_at, + changed_at=database_path.property_from_time, status=database_path.property_status, value=value, ) diff --git a/backend/infrahub/core/diff/repository/repository.py b/backend/infrahub/core/diff/repository/repository.py index 4969d9328e..15854f1a9a 100644 --- a/backend/infrahub/core/diff/repository/repository.py +++ b/backend/infrahub/core/diff/repository/repository.py @@ -2,6 +2,7 @@ from infrahub import config from infrahub.core import registry +from 
infrahub.core.diff.query.field_summary import EnrichedDiffNodeFieldSummaryQuery from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase, retry_db_transaction from infrahub.exceptions import ResourceNotFoundError @@ -12,6 +13,7 @@ EnrichedDiffRoot, EnrichedDiffs, EnrichedNodeCreateRequest, + NodeDiffFieldSummary, TimeRange, TrackingId, ) @@ -236,3 +238,12 @@ async def update_conflict_by_id( if not conflict_node: raise ResourceNotFoundError(f"No conflict with id {conflict_id}") return self.deserializer.deserialize_conflict(diff_conflict_node=conflict_node) + + async def get_node_field_summaries( + self, diff_branch_name: str, tracking_id: TrackingId | None = None, diff_id: str | None = None + ) -> list[NodeDiffFieldSummary]: + query = await EnrichedDiffNodeFieldSummaryQuery.init( + db=self.db, diff_branch_name=diff_branch_name, tracking_id=tracking_id, diff_id=diff_id + ) + await query.execute(db=self.db) + return await query.get_field_summaries() diff --git a/backend/infrahub/message_bus/operations/requests/diff.py b/backend/infrahub/core/diff/tasks.py similarity index 70% rename from backend/infrahub/message_bus/operations/requests/diff.py rename to backend/infrahub/core/diff/tasks.py index f15f016eb2..eda8137918 100644 --- a/backend/infrahub/message_bus/operations/requests/diff.py +++ b/backend/infrahub/core/diff/tasks.py @@ -2,36 +2,39 @@ from infrahub.core import registry from infrahub.core.diff.coordinator import DiffCoordinator +from infrahub.core.diff.models import RequestDiffRefresh, RequestDiffUpdate from infrahub.dependencies.registry import get_component_registry from infrahub.log import get_logger -from infrahub.message_bus import messages -from infrahub.services import InfrahubServices +from infrahub.services import services log = get_logger() @flow(name="diff-update") -async def update(message: messages.RequestDiffUpdate, service: InfrahubServices) -> None: +async def update_diff(model: RequestDiffUpdate) -> None: + 
service = services.service component_registry = get_component_registry() base_branch = await registry.get_branch(db=service.database, branch=registry.default_branch) - diff_branch = await registry.get_branch(db=service.database, branch=message.branch_name) + diff_branch = await registry.get_branch(db=service.database, branch=model.branch_name) diff_coordinator = await component_registry.get_component(DiffCoordinator, db=service.database, branch=diff_branch) await diff_coordinator.run_update( base_branch=base_branch, diff_branch=diff_branch, - from_time=message.from_time, - to_time=message.to_time, - name=message.name, + from_time=model.from_time, + to_time=model.to_time, + name=model.name, ) @flow(name="diff-refresh") -async def refresh(message: messages.RequestDiffRefresh, service: InfrahubServices) -> None: +async def refresh_diff(model: RequestDiffRefresh) -> None: + service = services.service + component_registry = get_component_registry() base_branch = await registry.get_branch(db=service.database, branch=registry.default_branch) - diff_branch = await registry.get_branch(db=service.database, branch=message.branch_name) + diff_branch = await registry.get_branch(db=service.database, branch=model.branch_name) diff_coordinator = await component_registry.get_component(DiffCoordinator, db=service.database, branch=diff_branch) - await diff_coordinator.recalculate(base_branch=base_branch, diff_branch=diff_branch, diff_id=message.diff_id) + await diff_coordinator.recalculate(base_branch=base_branch, diff_branch=diff_branch, diff_id=model.diff_id) diff --git a/backend/infrahub/core/initialization.py b/backend/infrahub/core/initialization.py index 30133f1f37..ae6989a849 100644 --- a/backend/infrahub/core/initialization.py +++ b/backend/infrahub/core/initialization.py @@ -15,6 +15,7 @@ PermissionDecision, ) from infrahub.core.graph import GRAPH_VERSION +from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.core.node.ipam import 
BuiltinIPPrefix from infrahub.core.node.permissions import CoreGlobalPermission, CoreObjectPermission @@ -32,7 +33,6 @@ from infrahub.menu.utils import create_menu_children from infrahub.permissions import PermissionBackend from infrahub.storage import InfrahubObjectStorage -from infrahub.utils import format_label log = get_logger() @@ -160,6 +160,7 @@ async def initialization(db: InfrahubDatabase) -> None: hash_new=default_branch.active_schema_hash.main, branch=default_branch.name, ) + await default_branch.save(db=db) for branch in list(registry.branch.values()): if branch.name in [default_branch.name, GLOBAL_BRANCH_NAME]: @@ -175,6 +176,7 @@ async def initialization(db: InfrahubDatabase) -> None: f" {hash_in_db!r} >> {branch.active_schema_hash.main!r}", branch=branch.name, ) + await branch.save(db=db) # --------------------------------------------------- # Load Default Namespace @@ -295,19 +297,6 @@ async def create_ipam_namespace( return obj -async def create_initial_permission(db: InfrahubDatabase) -> Node: - permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) - await permission.new( - db=db, - name=format_label(GlobalPermissions.SUPER_ADMIN.value), - action=GlobalPermissions.SUPER_ADMIN.value, - decision=PermissionDecision.ALLOW_ALL.value, - ) - await permission.save(db=db) - log.info(f"Created global permission: {GlobalPermissions.SUPER_ADMIN}") - return permission - - async def create_default_menu(db: InfrahubDatabase) -> None: for item in default_menu: obj = await item.to_node(db=db) @@ -320,9 +309,9 @@ async def create_super_administrator_role(db: InfrahubDatabase) -> Node: permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( db=db, - name=format_label(GlobalPermissions.SUPER_ADMIN.value), action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value, + description="Allow a user to do anything", ) await permission.save(db=db) log.info(f"Created global permission: 
{GlobalPermissions.SUPER_ADMIN}") @@ -340,30 +329,46 @@ async def create_default_roles(db: InfrahubDatabase) -> Node: repo_permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await repo_permission.new( db=db, - name=format_label(GlobalPermissions.MANAGE_REPOSITORIES.value), action=GlobalPermissions.MANAGE_REPOSITORIES.value, decision=PermissionDecision.ALLOW_ALL.value, + description="Allow a user to manage repositories", ) await repo_permission.save(db=db) schema_permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await schema_permission.new( db=db, - name=format_label(GlobalPermissions.MANAGE_SCHEMA.value), action=GlobalPermissions.MANAGE_SCHEMA.value, decision=PermissionDecision.ALLOW_ALL.value, + description="Allow a user to manage the schema", ) await schema_permission.save(db=db) proposed_change_permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await proposed_change_permission.new( db=db, - name=format_label(GlobalPermissions.MERGE_PROPOSED_CHANGE.value), action=GlobalPermissions.MERGE_PROPOSED_CHANGE.value, decision=PermissionDecision.ALLOW_ALL.value, + description="Allow a user to merge proposed changes", ) await proposed_change_permission.save(db=db) + # Other permissions, created to keep references of them from the start + for permission_action, permission_description in ( + (GlobalPermissions.EDIT_DEFAULT_BRANCH, "Allow a user to change data in the default branch"), + (GlobalPermissions.MANAGE_ACCOUNTS, "Allow a user to manage accounts, account roles and account groups"), + (GlobalPermissions.MANAGE_PERMISSIONS, "Allow a user to manage permissions"), + (GlobalPermissions.MERGE_BRANCH, "Allow a user to merge branches"), + ): + permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) + await permission.new( + db=db, + action=permission_action.value, + decision=PermissionDecision.ALLOW_ALL.value, + description=permission_description, + ) + await permission.save(db=db) + 
view_permission = await Node.init(db=db, schema=InfrahubKind.OBJECTPERMISSION) await view_permission.new( db=db, @@ -371,6 +376,7 @@ async def create_default_roles(db: InfrahubDatabase) -> Node: namespace="*", action=PermissionAction.VIEW.value, decision=PermissionDecision.ALLOW_ALL.value, + description="Allow a user to view any object in any branch", ) await view_permission.save(db=db) @@ -381,6 +387,7 @@ async def create_default_roles(db: InfrahubDatabase) -> Node: namespace="*", action=PermissionAction.ANY.value, decision=PermissionDecision.ALLOW_OTHER.value, + description="Allow a user to change data in non-default branches", ) await modify_permission.save(db=db) @@ -409,6 +416,29 @@ async def create_default_roles(db: InfrahubDatabase) -> Node: return role +async def create_anonymous_role(db: InfrahubDatabase) -> Node: + deny_permission = await Node.init(db=db, schema=InfrahubKind.OBJECTPERMISSION) + await deny_permission.new( + db=db, name="*", namespace="*", action=PermissionAction.ANY.value, decision=PermissionDecision.DENY.value + ) + await deny_permission.save(db=db) + + view_permission = await NodeManager.get_one_by_hfid( + db=db, + kind=InfrahubKind.OBJECTPERMISSION, + hfid=["*", "*", PermissionAction.VIEW.value, str(PermissionDecision.ALLOW_ALL.value)], + ) + + role = await Node.init(db=db, schema=InfrahubKind.ACCOUNTROLE) + await role.new( + db=db, name=config.SETTINGS.main.anonymous_access_role, permissions=[deny_permission, view_permission] + ) + await role.save(db=db) + log.info(f"Created anonymous account role: {config.SETTINGS.main.anonymous_access_role}") + + return role + + async def create_super_administrators_group( db: InfrahubDatabase, role: Node, admin_accounts: list[CoreAccount] ) -> Node: @@ -486,6 +516,9 @@ async def first_time_initialization(db: InfrahubDatabase) -> None: await create_super_administrators_group(db=db, role=administrator_role, admin_accounts=admin_accounts) await create_default_roles(db=db) + if 
config.SETTINGS.main.allow_anonymous_access: + await create_anonymous_role(db=db) + # -------------------------------------------------- # Create Default IPAM Namespace # -------------------------------------------------- diff --git a/backend/infrahub/core/ipam/tasks.py b/backend/infrahub/core/ipam/tasks.py index 82b123be19..a593c71360 100644 --- a/backend/infrahub/core/ipam/tasks.py +++ b/backend/infrahub/core/ipam/tasks.py @@ -6,6 +6,7 @@ from infrahub.core import registry from infrahub.core.ipam.reconciler import IpamReconciler from infrahub.services import services +from infrahub.workflows.utils import add_branch_tag from .model import IpamNodeDetails @@ -13,10 +14,18 @@ from infrahub.core.ipam.constants import AllIPTypes -@flow(name="ipam-reconciliation") +@flow( + name="ipam-reconciliation", + flow_run_name="branch-{branch}", + description="Ensure the IPAM Tree is up to date", + persist_result=False, +) async def ipam_reconciliation(branch: str, ipam_node_details: list[IpamNodeDetails]) -> None: service = services.service branch_obj = await registry.get_branch(db=service.database, branch=branch) + + await add_branch_tag(branch_name=branch_obj.name) + ipam_reconciler = IpamReconciler(db=service.database, branch=branch_obj) for ipam_node_detail_item in ipam_node_details: diff --git a/backend/infrahub/core/merge.py b/backend/infrahub/core/merge.py index 242162cded..1cb00c19f7 100644 --- a/backend/infrahub/core/merge.py +++ b/backend/infrahub/core/merge.py @@ -3,15 +3,12 @@ from typing import TYPE_CHECKING, Optional, Union from infrahub.core.constants import DiffAction, RepositoryInternalStatus -from infrahub.core.diff.coordinator import DiffCoordinator -from infrahub.core.diff.merger.merger import DiffMerger from infrahub.core.manager import NodeManager from infrahub.core.models import SchemaBranchDiff, SchemaUpdateValidationResult from infrahub.core.protocols import CoreRepository from infrahub.core.registry import registry from infrahub.core.schema import 
GenericSchema, NodeSchema from infrahub.core.timestamp import Timestamp -from infrahub.dependencies.registry import get_component_registry from infrahub.exceptions import ValidationError from infrahub.message_bus import messages @@ -19,6 +16,8 @@ if TYPE_CHECKING: from infrahub.core.branch import Branch + from infrahub.core.diff.coordinator import DiffCoordinator + from infrahub.core.diff.merger.merger import DiffMerger from infrahub.core.models import SchemaUpdateConstraintInfo, SchemaUpdateMigrationInfo from infrahub.core.schema.manager import SchemaDiff from infrahub.core.schema.schema_branch import SchemaBranch @@ -33,12 +32,16 @@ def __init__( self, db: InfrahubDatabase, source_branch: Branch, + diff_coordinator: DiffCoordinator, + diff_merger: DiffMerger, destination_branch: Optional[Branch] = None, service: Optional[InfrahubServices] = None, ): self.source_branch = source_branch self.destination_branch: Branch = destination_branch or registry.get_branch_from_registry() self.db = db + self.diff_coordinator = diff_coordinator + self.diff_merger = diff_merger self.migrations: list[SchemaUpdateMigrationInfo] = [] self._graph_diff: Optional[BranchDiffer] = None @@ -226,50 +229,32 @@ async def validate_graph(self) -> list[DataConflict]: async def merge( self, at: Optional[Union[str, Timestamp]] = None, - conflict_resolution: Optional[dict[str, bool]] = None, + conflict_resolution: Optional[dict[str, bool]] = None, # pylint: disable=unused-argument ) -> None: """Merge the current branch into main.""" - conflict_resolution = conflict_resolution or {} - conflicts = await self.validate_branch() - - if conflict_resolution: - errors: list[str] = [] - for conflict in conflicts: - if conflict.conflict_path not in conflict_resolution: - errors.append(str(conflict)) - - if errors: - raise ValidationError( - f"Unable to merge the branch '{self.source_branch.name}', conflict resolution missing: {', '.join(errors)}" - ) + if self.source_branch.name == registry.default_branch: 
+ raise ValidationError(f"Unable to merge the branch '{self.source_branch.name}' into itself") - elif conflicts: - errors = [str(conflict) for conflict in conflicts] + enriched_diff = await self.diff_coordinator.update_branch_diff( + base_branch=self.destination_branch, diff_branch=self.source_branch + ) + conflict_map = enriched_diff.get_all_conflicts() + errors: list[str] = [] + for conflict_path, conflict in conflict_map.items(): + if conflict.selected_branch is None: + errors.append(conflict_path) + + if errors: raise ValidationError( - f"Unable to merge the branch '{self.source_branch.name}', validation failed: {', '.join(errors)}" + f"Unable to merge the branch '{self.source_branch.name}', conflict resolution missing: {', '.join(errors)}" ) - if self.source_branch.name == registry.default_branch: - raise ValidationError(f"Unable to merge the branch '{self.source_branch.name}' into itself") - # TODO need to find a way to properly communicate back to the user any issue that could come up during the merge # From the Graph or From the repositories at = Timestamp(at) - await self.merge_graph(at=at) + await self.diff_merger.merge_graph(at=at) await self.merge_repositories() - async def merge_graph( - self, - at: Timestamp, - ) -> None: - component_registry = get_component_registry() - diff_coordinator = await component_registry.get_component( - DiffCoordinator, db=self.db, branch=self.source_branch - ) - await diff_coordinator.update_branch_diff(base_branch=self.destination_branch, diff_branch=self.source_branch) - diff_merger = await component_registry.get_component(DiffMerger, db=self.db, branch=self.source_branch) - await diff_merger.merge_graph(at=at) - async def merge_repositories(self) -> None: # Collect all Repositories in Main because we'll need the commit in Main for each one. 
repos_in_main_list = await NodeManager.query(schema=CoreRepository, db=self.db) diff --git a/backend/infrahub/core/migrations/graph/m012_convert_account_generic.py b/backend/infrahub/core/migrations/graph/m012_convert_account_generic.py index 37c49b1556..616deb5a39 100644 --- a/backend/infrahub/core/migrations/graph/m012_convert_account_generic.py +++ b/backend/infrahub/core/migrations/graph/m012_convert_account_generic.py @@ -61,11 +61,18 @@ def __init__(self, **kwargs: Any): def render_match(self) -> str: query = """ // Find all the active nodes - MATCH (node:Node) - WHERE ( "Profile%(node_kind)s" IN LABELS(node) OR "%(node_kind)s" IN LABELS(node) ) - AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr.name })) - AND NOT exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $new_attr.name })) - + CALL { + MATCH (node:%(node_kind)s) + WHERE exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr.name })) + AND NOT exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $new_attr.name })) + RETURN node + UNION + MATCH (node:Profile%(node_kind)s) + WHERE exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr.name })) + AND NOT exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $new_attr.name })) + RETURN node + } + WITH node """ % {"node_kind": self.previous_attr.node_kind} return query @@ -85,12 +92,14 @@ def __init__(self, **kwargs: Any): InfrahubKind.LINEAGEOWNER, InfrahubKind.LINEAGESOURCE, ], + kind=InfrahubKind.ACCOUNT, ) previous_node = SchemaNodeInfo( name="Account", namespace="Core", branch_support=BranchSupportType.AGNOSTIC.value, labels=[InfrahubKind.ACCOUNT, InfrahubKind.LINEAGEOWNER, InfrahubKind.LINEAGESOURCE], + kind=InfrahubKind.ACCOUNT, ) branch = Branch( diff --git a/backend/infrahub/core/migrations/query/attribute_add.py b/backend/infrahub/core/migrations/query/attribute_add.py index d34a8e285d..f55dc84b2f 100644 --- a/backend/infrahub/core/migrations/query/attribute_add.py +++ 
b/backend/infrahub/core/migrations/query/attribute_add.py @@ -37,6 +37,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No self.params["attr_name"] = self.attribute_name self.params["attr_type"] = self.attribute_kind self.params["branch_support"] = self.branch_support + self.params["current_time"] = self.at.to_string() if self.default_value is not None: self.params["attr_value"] = self.default_value @@ -55,28 +56,34 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No query = """ MATCH p = (n:%(node_kind)s) - WHERE NOT exists((n)-[:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name })) CALL { WITH n - MATCH (root:Root)<-[r:IS_PART_OF]-(n) - WHERE %(branch_filter)s - RETURN n as n1, r as r1 - ORDER BY r.branch_level DESC, r.from DESC + MATCH (root:Root)<-[r1:IS_PART_OF]-(n) + OPTIONAL MATCH (n)-[r2:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name }) + WHERE all(r in [r1, r2] WHERE (%(branch_filter)s)) + RETURN n as n1, r1 as r11, r2 as r12 + ORDER BY r2.branch_level DESC, r2.from ASC, r1.branch_level DESC, r1.from ASC LIMIT 1 } - WITH n1 as n, r1 as rb - WHERE rb.status = "active" + WITH n1 as n, r11 as r1, r12 as r2 + WHERE r1.status = "active" AND (r2 IS NULL OR r2.status = "deleted") MERGE (av:AttributeValue { value: $attr_value, is_default: true }) MERGE (is_protected_value:Boolean { value: $is_protected_default }) MERGE (is_visible_value:Boolean { value: $is_visible_default }) - WITH n, av, is_protected_value, is_visible_value + WITH n, av, is_protected_value, is_visible_value, r2 CREATE (a:Attribute { name: $attr_name, branch_support: $branch_support }) CREATE (n)-[:HAS_ATTRIBUTE $rel_props ]->(a) CREATE (a)-[:HAS_VALUE $rel_props ]->(av) CREATE (a)-[:IS_PROTECTED $rel_props]->(is_protected_value) CREATE (a)-[:IS_VISIBLE $rel_props]->(is_visible_value) - """ % {"branch_filter": branch_filter, "node_kind": self.node_kind} + %(uuid_generation)s + FOREACH (i in CASE WHEN r2.status = "deleted" THEN [1] ELSE 
[] END | + SET r2.to = $current_time + ) + """ % { + "branch_filter": branch_filter, + "node_kind": self.node_kind, + "uuid_generation": db.render_uuid_generation(node_label="a", node_attr="uuid"), + } self.add_to_query(query) self.return_labels = ["n.uuid", "a.uuid"] - - self.add_to_query(db.render_uuid_generation(node_label="a", node_attr="uuid")) diff --git a/backend/infrahub/core/migrations/query/attribute_rename.py b/backend/infrahub/core/migrations/query/attribute_rename.py index e712ad7482..efca8fffbb 100644 --- a/backend/infrahub/core/migrations/query/attribute_rename.py +++ b/backend/infrahub/core/migrations/query/attribute_rename.py @@ -38,9 +38,16 @@ def __init__( def render_match(self) -> str: query = """ // Find all the active nodes - MATCH (node:Node) - WHERE ( "Profile%(node_kind)s" IN LABELS(node) OR "%(node_kind)s" IN LABELS(node) ) - AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr.name })) + CALL { + MATCH (node:%(node_kind)s) + WHERE exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr.name })) + RETURN node + UNION + MATCH (node:Profile%(node_kind)s) + WHERE exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr.name })) + RETURN node + } + WITH node """ % {"node_kind": self.previous_attr.node_kind} return query diff --git a/backend/infrahub/core/migrations/query/node_duplicate.py b/backend/infrahub/core/migrations/query/node_duplicate.py index df8df51bf6..3dc03c25d4 100644 --- a/backend/infrahub/core/migrations/query/node_duplicate.py +++ b/backend/infrahub/core/migrations/query/node_duplicate.py @@ -19,10 +19,7 @@ class SchemaNodeInfo(BaseModel): namespace: str branch_support: str = BranchSupportType.AWARE.value labels: list[str] - - @property - def kind(self) -> str: - return self.namespace + self.name + kind: str class NodeDuplicateQuery(Query): diff --git a/backend/infrahub/core/migrations/schema/node_attribute_remove.py b/backend/infrahub/core/migrations/schema/node_attribute_remove.py index 
f73a79cffb..0e20cfdeae 100644 --- a/backend/infrahub/core/migrations/schema/node_attribute_remove.py +++ b/backend/infrahub/core/migrations/schema/node_attribute_remove.py @@ -59,8 +59,8 @@ def render_sub_query_per_rel_type(rel_type: str, rel_def: FieldInfo) -> str: query = """ // Find all the active nodes - MATCH (node:Node) - WHERE $node_kind IN LABELS(node) AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name })) + MATCH (node:%(node_kind)s) + WHERE exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name })) CALL { WITH node MATCH (root:Root)<-[r:IS_PART_OF]-(node) @@ -102,7 +102,11 @@ def render_sub_query_per_rel_type(rel_type: str, rel_def: FieldInfo) -> str: SET rb.to = $current_time ) RETURN DISTINCT active_attr - """ % {"branch_filter": branch_filter, "sub_query_all": sub_query_all} + """ % { + "branch_filter": branch_filter, + "sub_query_all": sub_query_all, + "node_kind": self.migration.new_schema.kind, + } self.add_to_query(query) diff --git a/backend/infrahub/core/migrations/schema/node_kind_update.py b/backend/infrahub/core/migrations/schema/node_kind_update.py index 4ad01dd4db..1df36ec90f 100644 --- a/backend/infrahub/core/migrations/schema/node_kind_update.py +++ b/backend/infrahub/core/migrations/schema/node_kind_update.py @@ -19,12 +19,14 @@ def __init__( namespace=migration.new_schema.namespace, branch_support=migration.new_schema.branch.value, labels=migration.new_schema.get_labels(), + kind=migration.new_schema.kind, ) previous_node = SchemaNodeInfo( name=migration.previous_schema.name, namespace=migration.previous_schema.namespace, branch_support=migration.previous_schema.branch.value, labels=migration.previous_schema.get_labels(), + kind=migration.previous_schema.kind, ) super().__init__(migration=migration, new_node=new_node, previous_node=previous_node, **kwargs) diff --git a/backend/infrahub/core/migrations/schema/node_remove.py b/backend/infrahub/core/migrations/schema/node_remove.py index 5867806390..223ea08bbd 100644 
--- a/backend/infrahub/core/migrations/schema/node_remove.py +++ b/backend/infrahub/core/migrations/schema/node_remove.py @@ -36,7 +36,6 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.migration.previous_schema.kind self.params["current_time"] = self.at.to_string() self.params["branch_name"] = self.branch.name @@ -52,8 +51,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No # ruff: noqa: E501 query = """ // Find all the active nodes - MATCH (node:Node) - WHERE $node_kind IN LABELS(node) + MATCH (node:%(node_kind)s) CALL { WITH node MATCH (root:Root)<-[r:IS_PART_OF]-(node) @@ -66,7 +64,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WHERE rb.status = "active" %(node_remove_query)s RETURN DISTINCT active_node - """ % {"branch_filter": branch_filter, "node_remove_query": node_remove_query} + """ % { + "branch_filter": branch_filter, + "node_remove_query": node_remove_query, + "node_kind": self.migration.previous_schema.kind, + } self.add_to_query(query) def get_nbr_migrations_executed(self) -> int: diff --git a/backend/infrahub/core/migrations/schema/tasks.py b/backend/infrahub/core/migrations/schema/tasks.py index d67e4ba2aa..4d19d65679 100644 --- a/backend/infrahub/core/migrations/schema/tasks.py +++ b/backend/infrahub/core/migrations/schema/tasks.py @@ -10,6 +10,7 @@ ) from infrahub.message_bus.operations.schema.migration import schema_path_migrate from infrahub.services import services +from infrahub.workflows.utils import add_branch_tag from .models import SchemaApplyMigrationData # noqa: TCH001 @@ -17,9 +18,10 @@ from infrahub.core.schema import MainSchemaTypes -@flow +@flow(name="schema-migrations-apply") async def schema_apply_migrations(message: SchemaApplyMigrationData) -> list[str]: service = 
services.service + await add_branch_tag(branch_name=message.branch.name) batch = InfrahubBatch() error_messages: list[str] = [] diff --git a/backend/infrahub/core/models.py b/backend/infrahub/core/models.py index 0339a40b72..92e65d2409 100644 --- a/backend/infrahub/core/models.py +++ b/backend/infrahub/core/models.py @@ -366,7 +366,7 @@ def _get_signature_field(cls, value: Any) -> list[bytes]: else: hashes.append(cls._get_hash_value(value)) - return hashes + return sorted(hashes) @property def _sorting_id(self) -> tuple[Any]: diff --git a/backend/infrahub/core/protocols.py b/backend/infrahub/core/protocols.py index 090191d585..afb936d294 100644 --- a/backend/infrahub/core/protocols.py +++ b/backend/infrahub/core/protocols.py @@ -67,7 +67,7 @@ class CoreArtifactTarget(CoreNode): class CoreBasePermission(CoreNode): - decision: Enum + description: StringOptional identifier: StringOptional roles: RelationshipManager @@ -143,6 +143,7 @@ class CoreMenu(CoreNode): icon: StringOptional protected: Boolean order_weight: Integer + required_permissions: ListAttributeOptional section: Enum parent: RelationshipManager children: RelationshipManager @@ -215,7 +216,7 @@ class CoreAccount(LineageOwner, LineageSource, CoreGenericAccount): pass -class CoreAccountGroup(CoreGroup): +class CoreAccountGroup(LineageOwner, LineageSource, CoreGroup): roles: RelationshipManager @@ -343,8 +344,8 @@ class CoreGeneratorValidator(CoreValidator): class CoreGlobalPermission(CoreBasePermission): - name: String action: Dropdown + decision: Enum class CoreGraphQLQuery(CoreNode): @@ -395,6 +396,7 @@ class CoreObjectPermission(CoreBasePermission): namespace: String name: String action: Enum + decision: Enum class CoreObjectThread(CoreThread): diff --git a/backend/infrahub/core/protocols_base.py b/backend/infrahub/core/protocols_base.py index 4d0e2c4c13..10f970bfe1 100644 --- a/backend/infrahub/core/protocols_base.py +++ b/backend/infrahub/core/protocols_base.py @@ -44,15 +44,15 @@ async def 
transaction(self, name: Optional[str]) -> AsyncTransaction: ... async def close(self) -> None: ... async def execute_query( - self, query: str, params: Optional[dict[str, Any]] = None, name: Optional[str] = "undefined" + self, query: str, params: Optional[dict[str, Any]] = None, name: str = "undefined" ) -> list[Record]: ... async def execute_query_with_metadata( - self, query: str, params: Optional[dict[str, Any]] = None, name: Optional[str] = "undefined" + self, query: str, params: Optional[dict[str, Any]] = None, name: str = "undefined" ) -> tuple[list[Record], dict[str, Any]]: ... async def run_query( - self, query: str, params: Optional[dict[str, Any]] = None, name: Optional[str] = "undefined" + self, query: str, params: Optional[dict[str, Any]] = None, name: str = "undefined" ) -> AsyncResult: ... def render_list_comprehension(self, items: str, item_name: str) -> str: ... diff --git a/backend/infrahub/core/query/__init__.py b/backend/infrahub/core/query/__init__.py index b5076c8032..e4b7190bd4 100644 --- a/backend/infrahub/core/query/__init__.py +++ b/backend/infrahub/core/query/__init__.py @@ -406,6 +406,11 @@ async def init( async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: raise NotImplementedError + def get_context(self) -> dict[str, str]: + """Provide additional context for this query, beyond the name. + Right now it's mainly used to add more labels to the metrics.""" + return {} + def add_to_query(self, query: Union[str, list[str]]) -> None: """Add a new section at the end of the query. 
@@ -529,13 +534,15 @@ async def execute(self, db: InfrahubDatabase) -> Self: if self.type == QueryType.READ: if self.limit or self.offset: - results = await db.execute_query(query=query_str, params=self.params, name=self.name) + results = await db.execute_query( + query=query_str, params=self.params, name=self.name, context=self.get_context() + ) else: results = await self.query_with_size_limit(db=db) elif self.type == QueryType.WRITE: results, metadata = await db.execute_query_with_metadata( - query=query_str, params=self.params, name=self.name + query=query_str, params=self.params, name=self.name, context=self.get_context() ) if "stats" in metadata: self.stats.add(metadata.get("stats")) @@ -560,6 +567,7 @@ async def query_with_size_limit(self, db: InfrahubDatabase) -> list[Record]: query=self.get_query(limit=query_limit, offset=offset), params=self.params, name=self.name, + context=self.get_context(), ) if "stats" in metadata: self.stats.add(metadata.get("stats")) diff --git a/backend/infrahub/core/query/attribute.py b/backend/infrahub/core/query/attribute.py index a949610666..9dd050a885 100644 --- a/backend/infrahub/core/query/attribute.py +++ b/backend/infrahub/core/query/attribute.py @@ -18,15 +18,12 @@ class AttributeQuery(Query): def __init__( self, - attr: BaseAttribute = None, + attr: BaseAttribute, attr_id: Optional[str] = None, at: Optional[Union[Timestamp, str]] = None, branch: Optional[Branch] = None, - **kwargs, + **kwargs: Any, ): - if not attr and not attr_id: - raise ValueError("Either attr or attr_id must be defined, none provided") - self.attr = attr self.attr_id = attr_id or attr.db_id @@ -46,7 +43,7 @@ class AttributeUpdateValueQuery(AttributeQuery): raise_error_if_empty: bool = True - async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: + async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: at = self.at or self.attr.at self.params["attr_uuid"] = self.attr.id @@ -84,8 +81,8 @@ class 
AttributeUpdateFlagQuery(AttributeQuery): def __init__( self, flag_name: str, - **kwargs, - ): + **kwargs: Any, + ) -> None: SUPPORTED_FLAGS = ["is_visible", "is_protected"] if flag_name not in SUPPORTED_FLAGS: @@ -95,7 +92,7 @@ def __init__( super().__init__(**kwargs) - async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: + async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: at = self.at or self.attr.at self.params["attr_uuid"] = self.attr.id @@ -125,14 +122,14 @@ def __init__( self, prop_name: str, prop_id: str, - **kwargs, + **kwargs: Any, ): self.prop_name = prop_name self.prop_id = prop_id super().__init__(**kwargs) - async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: + async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: at = self.at or self.attr.at self.params["attr_uuid"] = self.attr.id @@ -161,7 +158,7 @@ class AttributeGetQuery(AttributeQuery): name = "attribute_get" type: QueryType = QueryType.READ - async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: + async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: self.params["attr_uuid"] = self.attr.id self.params["node_uuid"] = self.attr.node.id diff --git a/backend/infrahub/core/query/diff.py b/backend/infrahub/core/query/diff.py index bccd4de226..8a26feb13c 100644 --- a/backend/infrahub/core/query/diff.py +++ b/backend/infrahub/core/query/diff.py @@ -216,25 +216,31 @@ async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: query = ( """ CALL { - MATCH p = ((src:Node)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(dst:Node)) + MATCH p = (src:Node)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(dst:Node) WHERE (rel.branch_support IN $branch_support AND %s r1.branch = r2.branch AND - (r1.to = r2.to OR (r1.to is NULL AND r2.to is NULL)) AND r1.from = r2.from AND r1.status = r2.status - AND all(r IN relationships(p) WHERE (r.branch IN $branch_names AND r.from >= 
$diff_from AND r.from <= $diff_to - AND ((r.to >= $diff_from AND r.to <= $diff_to) OR r.to is NULL)) + (r1.to = r2.to OR (r1.to is NULL AND r2.to is NULL)) + AND r1.from = r2.from AND r1.status = r2.status + AND r1.branch IN $branch_names + AND ( + (r1.from >= $diff_from AND r1.from <= $diff_to AND r1.to is NULL) + OR (r1.to >= $diff_from AND r1.to <= $diff_to) ) ) RETURN DISTINCT [rel.uuid, r1.branch] as identifier, rel, r1.branch as branch_name } CALL { WITH rel, branch_name - MATCH p = ((sn:Node)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(dn:Node)) + MATCH p = (sn:Node)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(dn:Node) WHERE (rel.branch_support IN $branch_support AND r1.branch = r2.branch AND - (r1.to = r2.to OR (r1.to is NULL AND r2.to is NULL)) AND r1.from = r2.from AND r1.status = r2.status - AND all(r IN relationships(p) WHERE (r.branch = branch_name AND r.from >= $diff_from AND r.from <= $diff_to - AND ((r.to >= $diff_from AND r.to <= $diff_to) OR r.to is NULL)) + (r1.to = r2.to OR (r1.to is NULL AND r2.to is NULL)) + AND r1.from = r2.from AND r1.status = r2.status + AND r1.branch = branch_name + AND ( + (r1.from >= $diff_from AND r1.from <= $diff_to AND r1.to is NULL) + OR (r1.to >= $diff_from AND r1.to <= $diff_to) ) + AND sn <> dn ) - AND sn <> dn RETURN rel as rel1, sn as sn1, dn as dn1, r1 as r11, r2 as r21 ORDER BY r1.branch_level DESC, r1.from DESC LIMIT 1 @@ -266,8 +272,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: query = """ CALL { MATCH (rel:Relationship)-[r3:IS_VISIBLE|IS_PROTECTED|HAS_SOURCE|HAS_OWNER]-() - WHERE (r3.branch IN $branch_names AND r3.from >= $diff_from AND r3.from <= $diff_to - AND ((r3.to >= $diff_from AND r3.to <= $diff_to ) OR r3.to is NULL)) + WHERE ( + r3.branch IN $branch_names + AND (r3.from >= $diff_from AND r3.from <= $diff_to AND r3.to is NULL) + OR (r3.to >= $diff_from AND r3.to <= $diff_to) + ) RETURN DISTINCT rel } CALL { @@ -281,11 +290,22 @@ async def query_init(self, 
db: InfrahubDatabase, **kwargs) -> None: LIMIT 1 } WITH rel1 as rel, sn1 as sn, dn1 as dn, r11 as r1, r21 as r2 - MATCH (rel:Relationship)-[r3:IS_VISIBLE|IS_PROTECTED|HAS_SOURCE|HAS_OWNER]-(rp) - WHERE ( - r3.branch IN $branch_names AND r3.from >= $diff_from AND r3.from <= $diff_to - AND ((r3.to >= $diff_from AND r3.to <= $diff_to) OR r3.to is NULL) - ) + CALL { + // ----------------------- + // group results to the latest entry for each edge type (IS_VISIBLE, etc.) + // ----------------------- + WITH rel + MATCH (rel:Relationship)-[r3:IS_VISIBLE|IS_PROTECTED|HAS_SOURCE|HAS_OWNER]->(prop) + WHERE ( + r3.branch IN $branch_names + AND (r3.from >= $diff_from AND r3.from <= $diff_to AND r3.to is NULL) + OR (r3.to >= $diff_from AND r3.to <= $diff_to) + ) + WITH r3, prop + ORDER BY r3.branch, type(r3), r3.from DESC + WITH r3.branch AS r3_branch, type(r3) AS type_r3, head(collect([r3, prop])) AS data + RETURN data[0] AS r3, data[1] AS rp + } """ % "\n AND ".join(rels_filter) self.add_to_query(query) diff --git a/backend/infrahub/core/query/node.py b/backend/infrahub/core/query/node.py index a990458f1c..6f12a96fa7 100644 --- a/backend/infrahub/core/query/node.py +++ b/backend/infrahub/core/query/node.py @@ -792,7 +792,6 @@ def _get_tracked_variables(self) -> list[str]: async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: self.order_by = [] - self.params["node_kind"] = self.schema.kind self.return_labels = ["n.uuid", "rb.branch", f"{db.get_id_function_name()}(rb) as rb_id"] where_clause_elements = [] @@ -803,8 +802,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: self.params.update(branch_params) query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH (root:Root)<-[r:IS_PART_OF]-(n) @@ -815,7 +813,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: } WITH n, r as rb WHERE rb.status = "active" - """ % {"branch_filter": branch_filter} + """ % 
{"branch_filter": branch_filter, "node_kind": self.schema.kind} self.add_to_query(query) use_simple = False if self.filters and "id" in self.filters: @@ -859,6 +857,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: self.order_by.append(far.final_value_query_variable) continue self.order_by.append(far.node_value_query_variable) + self.order_by.append("n.uuid") async def _add_node_filter_attributes( self, diff --git a/backend/infrahub/core/query/relationship.py b/backend/infrahub/core/query/relationship.py index 81180d96b2..a8842837b4 100644 --- a/backend/infrahub/core/query/relationship.py +++ b/backend/infrahub/core/query/relationship.py @@ -128,15 +128,15 @@ class FullRelationshipIdentifier: class RelationshipQuery(Query): def __init__( self, - rel: Union[type[Relationship], Relationship] = None, + rel: Union[type[Relationship], Relationship] | None = None, rel_type: Optional[str] = None, - source: Node = None, - source_id: UUID = None, - destination: Node = None, - destination_id: UUID = None, - schema: RelationshipSchema = None, - branch: Branch = None, - at: Union[Timestamp, str] = None, + source: Node | None = None, + source_id: UUID | None = None, + destination: Node | None = None, + destination_id: UUID | None = None, + schema: RelationshipSchema | None = None, + branch: Branch | None = None, + at: Union[Timestamp, str] | None = None, **kwargs, ): if not source and not source_id: @@ -196,7 +196,7 @@ class RelationshipCreateQuery(RelationshipQuery): def __init__( self, destination: Node = None, - destination_id: UUID = None, + destination_id: UUID | None = None, **kwargs, ): if not destination and not destination_id: @@ -701,7 +701,7 @@ def get_peers(self) -> Generator[RelationshipPeerData, None, None]: rel_node_id=result.get("rl").get("uuid"), updated_at=rels[0]["from"], rels=[RelData.from_db(rel) for rel in rels], - branch=self.branch, + branch=self.branch.name, properties={}, ) diff --git 
a/backend/infrahub/core/query/resource_manager.py b/backend/infrahub/core/query/resource_manager.py index e5989c9ff3..806e273874 100644 --- a/backend/infrahub/core/query/resource_manager.py +++ b/backend/infrahub/core/query/resource_manager.py @@ -116,27 +116,28 @@ def __init__( super().__init__(**kwargs) # type: ignore[arg-type] async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: - self.params["pool_id"] = self.pool.get_id() self.params["node_attribute"] = self.pool.node_attribute.value self.params["start_range"] = self.pool.start_range.value self.params["end_range"] = self.pool.end_range.value - self.params["time_at"] = self.at.to_string() - - def rel_filter(rel_name: str) -> str: - return f"{rel_name}.from <= $time_at AND ({rel_name}.to IS NULL OR {rel_name}.to >= $time_at)" + branch_filter, branch_params = self.branch.get_query_filter_path( + at=self.at.to_string(), branch_agnostic=self.branch_agnostic + ) + self.params.update(branch_params) - query = f""" - MATCH (n:%(node)s)-[ha:HAS_ATTRIBUTE]-(a:Attribute {{name: $node_attribute}})-[hv:HAS_VALUE]-(av:AttributeValue) + query = """ + MATCH (n:%(node)s)-[ha:HAS_ATTRIBUTE]-(a:Attribute {name: $node_attribute})-[hv:HAS_VALUE]-(av:AttributeValue) MATCH (a)-[hs:HAS_SOURCE]-(pool:%(number_pool_kind)s) WHERE av.value >= $start_range and av.value <= $end_range - AND ({rel_filter("ha")}) - AND ({rel_filter("hv")}) - AND ({rel_filter("hs")}) + AND all(r in [ha, hv, hs] WHERE (%(branch_filter)s)) + AND ha.status = "active" + AND hv.status = "active" + AND hs.status = "active" """ % { "node": self.pool.node.value, "number_pool_kind": InfrahubKind.NUMBERPOOL, + "branch_filter": branch_filter, } self.add_to_query(query) @@ -207,15 +208,27 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No ) self.params.update(branch_params) + self.params["attribute_name"] = self.pool.node_attribute.value query = """ - MATCH (pool:%(number_pool)s { uuid: $pool_id 
})-[r:IS_RESERVED]->(av:AttributeValue ) - WHERE - toInteger(av.value) >= $start_range and toInteger(av.value) <= $end_range - AND - %(branch_filter)s - """ % {"branch_filter": branch_filter, "number_pool": InfrahubKind.NUMBERPOOL} - + MATCH (pool:%(number_pool)s { uuid: $pool_id }) + CALL { + WITH pool + MATCH (pool)-[res:IS_RESERVED]->(av:AttributeValue)<-[hv:HAS_VALUE]-(attr:Attribute) + WHERE + attr.name = $attribute_name + AND + toInteger(av.value) >= $start_range and toInteger(av.value) <= $end_range + AND + all(r in [res, hv] WHERE (%(branch_filter)s)) + RETURN av, (res.status = "active" AND hv.status = "active") AS is_active + } + WITH av, is_active + WHERE is_active = TRUE + """ % { + "branch_filter": branch_filter, + "number_pool": InfrahubKind.NUMBERPOOL, + } self.add_to_query(query) self.return_labels = ["av.value"] self.order_by = ["av.value"] diff --git a/backend/infrahub/core/relationship/model.py b/backend/infrahub/core/relationship/model.py index 4c1a083347..9c55197724 100644 --- a/backend/infrahub/core/relationship/model.py +++ b/backend/infrahub/core/relationship/model.py @@ -1081,13 +1081,18 @@ async def remove_in_db( remove_at = Timestamp(at) branch = self.get_branch_based_on_support_type() - # when we remove a relationship we need to : # - Update the existing relationship if we are on the same branch - # - Create a new rel of type DELETED in the right branch rel_ids_per_branch = peer_data.rel_ids_per_branch() if branch.name in rel_ids_per_branch: await update_relationships_to([str(ri) for ri in rel_ids_per_branch[self.branch.name]], to=remove_at, db=db) + # - Create a new rel of type DELETED if the existing relationship is on a different branch + rel_branches: set[str] = set() + if peer_data.rels: + rel_branches = {r.branch for r in peer_data.rels} + if rel_branches == {peer_data.branch}: + return + query = await RelationshipDataDeleteQuery.init( db=db, rel=self.rel_class, diff --git a/backend/infrahub/core/schema/definitions/core.py 
b/backend/infrahub/core/schema/definitions/core.py index 1ea4d6d445..8db9acb0fb 100644 --- a/backend/infrahub/core/schema/definitions/core.py +++ b/backend/infrahub/core/schema/definitions/core.py @@ -80,12 +80,13 @@ {"name": "icon", "kind": "Text", "optional": True, "order_weight": 4000}, {"name": "protected", "kind": "Boolean", "default_value": False, "read_only": True, "order_weight": 5000}, {"name": "order_weight", "kind": "Number", "default_value": 2000, "order_weight": 6000}, + {"name": "required_permissions", "kind": "List", "optional": True, "order_weight": 7000}, { "name": "section", "kind": "Text", "enum": ["object", "internal"], "default_value": "object", - "order_weight": 7000, + "order_weight": 8000, }, ], } @@ -935,13 +936,7 @@ "include_in_menu": False, "generate_profile": False, "attributes": [ - { - "name": "decision", - "kind": "Number", - "enum": PermissionDecision.available_types(), - "default_value": PermissionDecision.ALLOW_ALL.value, - "order_weight": 5000, - }, + {"name": "description", "kind": "Text", "optional": True}, { "name": "identifier", "kind": "Text", @@ -2169,19 +2164,27 @@ "description": "A permission that grants global rights to perform actions in Infrahub", "label": "Global permission", "include_in_menu": False, - "order_by": ["name__value", "action__value"], - "display_labels": ["name__value"], + "order_by": ["action__value", "decision__value"], + "display_labels": ["action__value", "decision__value"], + "human_friendly_id": ["action__value", "decision__value"], "generate_profile": False, "inherit_from": [InfrahubKind.BASEPERMISSION], "branch": BranchSupportType.AGNOSTIC.value, "attributes": [ - {"name": "name", "kind": "Text", "unique": True, "order_weight": 1000}, { "name": "action", "kind": "Dropdown", "choices": [{"name": permission.value} for permission in GlobalPermissions], "order_weight": 2000, }, + { + "name": "decision", + "kind": "Number", + "enum": PermissionDecision.available_types(), + "default_value": 
PermissionDecision.ALLOW_ALL.value, + "order_weight": 3000, + "description": "Decide to deny or allow the action at a global level", + }, ], }, { @@ -2192,6 +2195,7 @@ "include_in_menu": False, "order_by": ["namespace__value", "name__value", "action__value", "decision__value"], "display_labels": ["namespace__value", "name__value", "action__value", "decision__value"], + "human_friendly_id": ["namespace__value", "name__value", "action__value", "decision__value"], "uniqueness_constraints": [["namespace__value", "name__value", "action__value", "decision__value"]], "generate_profile": False, "inherit_from": [InfrahubKind.BASEPERMISSION], @@ -2205,6 +2209,17 @@ "default_value": PermissionAction.ANY.value, "order_weight": 4000, }, + { + "name": "decision", + "kind": "Number", + "enum": PermissionDecision.available_types(), + "default_value": PermissionDecision.ALLOW_ALL.value, + "order_weight": 5000, + "description": ( + "Decide to deny or allow the action. If allowed, it can be configured for the default branch, any other branches or all " + "branches" + ), + }, ], }, { @@ -2216,6 +2231,7 @@ "include_in_menu": False, "order_by": ["name__value"], "display_labels": ["name__value"], + "human_friendly_id": ["name__value"], "generate_profile": False, "attributes": [{"name": "name", "kind": "Text", "unique": True}], "relationships": [ @@ -2246,8 +2262,9 @@ "include_in_menu": False, "order_by": ["name__value"], "display_labels": ["name__value"], + "human_friendly_id": ["name__value"], "generate_profile": False, - "inherit_from": [InfrahubKind.GENERICGROUP], + "inherit_from": [InfrahubKind.LINEAGEOWNER, InfrahubKind.LINEAGESOURCE, InfrahubKind.GENERICGROUP], "branch": BranchSupportType.AGNOSTIC.value, "relationships": [ { diff --git a/backend/infrahub/core/schema/manager.py b/backend/infrahub/core/schema/manager.py index d2a07a938c..abafd790bf 100644 --- a/backend/infrahub/core/schema/manager.py +++ b/backend/infrahub/core/schema/manager.py @@ -389,82 +389,113 @@ async def 
update_node_in_db_based_on_diff( # pylint: disable=too-many-branches, new_node = node.duplicate() - # Update the attributes and the relationships nodes as well - if "attributes" in diff.changed: + diff_attributes = diff.changed.get("attributes") + diff_relationships = diff.changed.get("relationships") + attrs_rels_to_update: set[str] = set() + if diff_attributes: + attrs_rels_to_update.update(set(diff_attributes.added.keys())) + attrs_rels_to_update.update(set(diff_attributes.changed.keys())) + attrs_rels_to_update.update(set(diff_attributes.removed.keys())) + if diff_relationships: + attrs_rels_to_update.update(set(diff_relationships.added.keys())) + attrs_rels_to_update.update(set(diff_relationships.changed.keys())) + attrs_rels_to_update.update(set(diff_relationships.removed.keys())) + + item_ids = set() + item_names = set() + for field in node.local_attributes + node.local_relationships: + if field.name not in attrs_rels_to_update: + continue + if field.id: + item_ids.add(field.id) + item_names.add(field.name) + missing_field_names = list(attrs_rels_to_update - item_names) + + items: dict[str, Node] = {} + if item_ids: + items = await self.get_many( + ids=list(item_ids), + db=db, + branch=branch, + include_owner=True, + include_source=True, + ) + if missing_field_names: + missing_attrs = await self.query( + db=db, + branch=branch, + schema=attribute_schema, + filters={"name__values": missing_field_names, "node__id": node.id}, + include_owner=True, + include_source=True, + ) + missing_rels = await self.query( + db=db, + branch=branch, + schema=relationship_schema, + filters={"name__values": missing_field_names, "node__id": node.id}, + include_owner=True, + include_source=True, + ) + items.update({field.id: field for field in missing_attrs + missing_rels}) + + if diff_attributes: await obj.attributes.update(db=db, data=[item.id for item in node.local_attributes if item.id]) - if "relationships" in diff.changed: + if diff_relationships: await 
obj.relationships.update(db=db, data=[item.id for item in node.local_relationships if item.id]) await obj.save(db=db) - # Then Update the Attributes and the relationships - def get_attrs_rels_to_update(diff: HashableModelDiff) -> list[str]: - items_to_update = [] - if "attributes" in diff.changed.keys() and diff.changed["attributes"]: - items_to_update.extend(list(diff.changed["attributes"].added.keys())) - items_to_update.extend(list(diff.changed["attributes"].changed.keys())) - items_to_update.extend(list(diff.changed["attributes"].removed.keys())) - if "relationships" in diff.changed.keys() and diff.changed["relationships"]: - items_to_update.extend(list(diff.changed["relationships"].added.keys())) - items_to_update.extend(list(diff.changed["relationships"].changed.keys())) - items_to_update.extend(list(diff.changed["relationships"].removed.keys())) - return items_to_update - - attrs_rels_to_update = get_attrs_rels_to_update(diff=diff) - - items = await self.get_many( - ids=[ - item.id - for item in node.local_attributes + node.local_relationships - if item.id and item.name in attrs_rels_to_update - ], - db=db, - branch=branch, - include_owner=True, - include_source=True, - ) - - if "attributes" in diff.changed.keys() and diff.changed["attributes"]: + if diff_attributes: for item in node.local_attributes: - if item.name in diff.changed["attributes"].added: + if item.name in diff_attributes.added: created_item = await self.create_attribute_in_db( schema=attribute_schema, item=item, branch=branch, db=db, parent=obj ) new_attr = new_node.get_attribute(name=item.name) new_attr.id = created_item.id - elif item.name in diff.changed["attributes"].changed and item.id and item.id in items: + elif item.name in diff_attributes.changed and item.id and item.id in items: await self.update_attribute_in_db(item=item, attr=items[item.id], db=db) - elif item.name in diff.changed["attributes"].removed and item.id and item.id in items: + elif item.name in diff_attributes.removed 
and item.id and item.id in items: await items[item.id].delete(db=db) elif ( - (item.name in diff.changed["attributes"].removed or item.name in diff.changed["attributes"].changed) + (item.name in diff_attributes.removed or item.name in diff_attributes.changed) and item.id and item.id not in items ): raise ValueError(f"Unable to find an attribute {item.name!r} to update or delete") - if "relationships" in diff.changed.keys() and diff.changed["relationships"]: + if diff_relationships: for item in node.local_relationships: - if item.name in diff.changed["relationships"].added: + if item.name in diff_relationships.added: created_rel = await self.create_relationship_in_db( schema=relationship_schema, item=item, branch=branch, db=db, parent=obj ) new_rel = new_node.get_relationship(name=item.name) new_rel.id = created_rel.id - elif item.name in diff.changed["relationships"].changed and item.id and item.id in items: + elif item.name in diff_relationships.changed and item.id and item.id in items: await self.update_relationship_in_db(item=item, rel=items[item.id], db=db) - elif item.name in diff.changed["relationships"].removed and item.id and item.id in items: + elif item.name in diff_relationships.removed and item.id and item.id in items: await items[item.id].delete(db=db) elif ( - ( - item.name in diff.changed["relationships"].removed - or item.name in diff.changed["relationships"].changed - ) + (item.name in diff_relationships.removed or item.name in diff_relationships.changed) and item.id and item.id not in items ): - raise ValueError(f"Unable to find an relationship {item.name!r} to update or delete") + raise ValueError(f"Unable to find a relationship {item.name!r} to update or delete") + + field_names_to_remove = [] + if diff_attributes and diff_attributes.removed: + attr_names_to_remove = set(diff_attributes.removed.keys()) - set(node.local_attribute_names) + field_names_to_remove.extend(list(attr_names_to_remove)) + if diff_relationships and 
diff_relationships.removed: + rel_names_to_remove = set(diff_relationships.removed.keys()) - set(node.local_relationship_names) + field_names_to_remove.extend(list(rel_names_to_remove)) + if field_names_to_remove: + for field_schema in items.values(): + if field_schema.name.value in field_names_to_remove: + await field_schema.delete(db=db) # Save back the node with the (potentially) newly created IDs in the SchemaManager self.set(name=new_node.kind, schema=new_node, branch=branch.name) @@ -560,7 +591,6 @@ async def load_schema( branch_schema = await self.load_schema_from_db( db=db, branch=branch, schema=current_schema, schema_diff=schema_diff ) - branch_schema.clear_cache() self.set_schema_branch(name=branch.name, schema=branch_schema) return branch_schema diff --git a/backend/infrahub/core/schema/schema_branch.py b/backend/infrahub/core/schema/schema_branch.py index 5d52e13dad..d1a2127212 100644 --- a/backend/infrahub/core/schema/schema_branch.py +++ b/backend/infrahub/core/schema/schema_branch.py @@ -45,7 +45,6 @@ from infrahub.core.schema.definitions.core import core_profile_schema_definition from infrahub.core.validators import CONSTRAINT_VALIDATOR_MAP from infrahub.exceptions import SchemaNotFoundError, ValidationError -from infrahub.graphql.manager import GraphQLSchemaManager from infrahub.log import get_logger from infrahub.types import ATTRIBUTE_TYPES from infrahub.utils import format_label @@ -56,7 +55,6 @@ log = get_logger() if TYPE_CHECKING: - from graphql import GraphQLSchema from pydantic import ValidationInfo @@ -70,8 +68,6 @@ def __init__(self, cache: dict, name: str | None = None, data: dict[str, dict[st self.nodes: dict[str, str] = {} self.generics: dict[str, str] = {} self.profiles: dict[str, str] = {} - self._graphql_schema: Optional[GraphQLSchema] = None - self._graphql_manager: Optional[GraphQLSchemaManager] = None if data: self.nodes = data.get("nodes", {}) @@ -122,7 +118,7 @@ def all_names(self) -> list[str]: def get_hash(self) -> str: 
"""Calculate the hash for this objects based on the content of nodes and generics. - Since the object themselves are considered immuable we just need to use the hash from each object to calculate the global hash. + Since the object themselves are considered immutable we just need to use the hash from each object to calculate the global hash. """ md5hash = hashlib.md5(usedforsecurity=False) for key, value in sorted(tuple(self.nodes.items()) + tuple(self.generics.items())): @@ -165,31 +161,6 @@ def from_dict_schema_object(cls, data: dict) -> Self: return cls(cache=cache, data=nodes) - def clear_cache(self) -> None: - self._graphql_manager = None - self._graphql_schema = None - - def get_graphql_manager(self) -> GraphQLSchemaManager: - if not self._graphql_manager: - self._graphql_manager = GraphQLSchemaManager(schema=self) - return self._graphql_manager - - def get_graphql_schema( - self, - include_query: bool = True, - include_mutation: bool = True, - include_subscription: bool = True, - include_types: bool = True, - ) -> GraphQLSchema: - if not self._graphql_schema: - self._graphql_schema = self.get_graphql_manager().generate( - include_query=include_query, - include_mutation=include_mutation, - include_subscription=include_subscription, - include_types=include_types, - ) - return self._graphql_schema - def diff(self, other: SchemaBranch) -> SchemaDiff: # Identify the nodes or generics that have been added or removed local_kind_id_map = self.get_all_kind_id_map(exclude_profiles=True) @@ -500,6 +471,7 @@ def process_validate(self) -> None: self.validate_required_relationships() def process_post_validation(self) -> None: + self.cleanup_inherited_elements() self.add_groups() self.add_hierarchy() self.generate_weight() @@ -659,7 +631,13 @@ def sync_uniqueness_constraints_and_unique_attributes(self) -> None: if len(constraint_paths) > 1: continue constraint_path = constraint_paths[0] - schema_attribute_path = node_schema.parse_schema_path(path=constraint_path, 
schema=self) + try: + schema_attribute_path = node_schema.parse_schema_path(path=constraint_path, schema=self) + except AttributePathParsingError as exc: + raise ValueError( + f"{node_schema.kind}: Requested unique constraint not found within node. (`{constraint_path}`)" + ) from exc + if ( schema_attribute_path.is_type_attribute and schema_attribute_path.attribute_property_name == "value" @@ -1296,6 +1274,50 @@ def generate_weight(self) -> None: self.set(name=name, schema=node) + def cleanup_inherited_elements(self) -> None: + # pylint: disable=too-many-branches + for name in self.node_names: + node = self.get_node(name=name, duplicate=False) + + attributes_to_delete = [] + relationships_to_delete = [] + + inherited_attribute_names = set(node.attribute_names) - set(node.local_attribute_names) + inherited_relationship_names = set(node.relationship_names) - set(node.local_relationship_names) + for item_name in inherited_attribute_names: + found = False + for generic_name in node.inherit_from: + generic = self.get_generic(name=generic_name, duplicate=False) + if item_name in generic.attribute_names: + attr = generic.get_attribute(name=item_name) + if attr.state != HashableModelState.ABSENT: + found = True + if not found: + attributes_to_delete.append(item_name) + + for item_name in inherited_relationship_names: + found = False + for generic_name in node.inherit_from: + generic = self.get_generic(name=generic_name, duplicate=False) + if item_name in generic.relationship_names: + rel = generic.get_relationship(name=item_name) + if rel.state != HashableModelState.ABSENT: + found = True + if not found: + relationships_to_delete.append(item_name) + + # If there is either an attribute or a relationship to delete + # We clone the node and we set the attribute / relationship as ABSENT + if attributes_to_delete or relationships_to_delete: + node_copy = self.get_node(name=name, duplicate=True) + for item_name in attributes_to_delete: + attr = 
node_copy.get_attribute(name=item_name) + attr.state = HashableModelState.ABSENT + for item_name in relationships_to_delete: + rel = node_copy.get_relationship(name=item_name) + rel.state = HashableModelState.ABSENT + self.set(name=name, schema=node_copy) + def add_groups(self) -> None: if not self.has(name=InfrahubKind.GENERICGROUP): return diff --git a/backend/infrahub/core/utils.py b/backend/infrahub/core/utils.py index 239d542944..00743d7506 100644 --- a/backend/infrahub/core/utils.py +++ b/backend/infrahub/core/utils.py @@ -132,12 +132,10 @@ async def count_relationships(db: InfrahubDatabase, label: Optional[str] = None) async def get_nodes(db: InfrahubDatabase, label: str) -> list[Neo4jNode]: """Return theall nodes of a given label in the database.""" query = """ - MATCH (node) - WHERE $label IN LABELS(node) + MATCH (node:%(node_kind)s) RETURN node - """ - params: dict = {"label": label} - results = await db.execute_query(query=query, params=params, name="get_nodes") + """ % {"node_kind": label} + results = await db.execute_query(query=query, name="get_nodes") return [result[0] for result in results] diff --git a/backend/infrahub/core/validators/attribute/choices.py b/backend/infrahub/core/validators/attribute/choices.py index b269723aac..3f72c9617a 100644 --- a/backend/infrahub/core/validators/attribute/choices.py +++ b/backend/infrahub/core/validators/attribute/choices.py @@ -25,14 +25,12 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["allowed_values"] = [choice.name for choice in self.attribute_schema.choices] self.params["null_value"] = NULL_VALUE query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH p = (n:%(node_kind)s) CALL { WITH n MATCH path = 
(root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue) @@ -50,7 +48,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No AND attribute_value IS NOT NULL AND attribute_value <> $null_value AND NOT (attribute_value IN $allowed_values) - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["node.uuid", "attribute_value", "value_relationship"] diff --git a/backend/infrahub/core/validators/attribute/enum.py b/backend/infrahub/core/validators/attribute/enum.py index 139fba2c26..8736d2b2b3 100644 --- a/backend/infrahub/core/validators/attribute/enum.py +++ b/backend/infrahub/core/validators/attribute/enum.py @@ -25,13 +25,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["allowed_values"] = self.attribute_schema.enum self.params["null_value"] = NULL_VALUE query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue) @@ -49,7 +47,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No AND attribute_value IS NOT NULL AND attribute_value <> $null_value AND NOT (attribute_value IN $allowed_values) - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["node.uuid", "attribute_value", "value_relationship"] diff --git a/backend/infrahub/core/validators/attribute/kind.py 
b/backend/infrahub/core/validators/attribute/kind.py index d00bc2f22a..d12366ec6f 100644 --- a/backend/infrahub/core/validators/attribute/kind.py +++ b/backend/infrahub/core/validators/attribute/kind.py @@ -32,13 +32,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["null_value"] = NULL_VALUE query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH p = (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue) @@ -55,7 +53,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WHERE all(r in relationships(full_path) WHERE r.status = "active") AND attribute_value IS NOT NULL AND attribute_value <> $null_value - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["node.uuid", "attribute_value", "value_relationship.branch as value_branch"] diff --git a/backend/infrahub/core/validators/attribute/length.py b/backend/infrahub/core/validators/attribute/length.py index 822b0d4dd9..3b4526e4e6 100644 --- a/backend/infrahub/core/validators/attribute/length.py +++ b/backend/infrahub/core/validators/attribute/length.py @@ -22,14 +22,12 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["min_length"] = self.attribute_schema.min_length self.params["max_length"] = 
self.attribute_schema.max_length query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue) @@ -48,7 +46,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No (toInteger($min_length) IS NOT NULL AND size(attribute_value) < toInteger($min_length)) OR (toInteger($max_length) IS NOT NULL AND size(attribute_value) > toInteger($max_length)) ) - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["node.uuid", "value_relationship", "attribute_value"] diff --git a/backend/infrahub/core/validators/attribute/optional.py b/backend/infrahub/core/validators/attribute/optional.py index 03670a42c3..5c0c46461a 100644 --- a/backend/infrahub/core/validators/attribute/optional.py +++ b/backend/infrahub/core/validators/attribute/optional.py @@ -22,13 +22,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["null_value"] = NULL_VALUE query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue) @@ -44,7 +42,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WITH full_path, node, attribute_value, value_relationship WHERE all(r in relationships(full_path) WHERE r.status = "active") AND (attribute_value IS NULL OR attribute_value = $null_value) - """ % {"branch_filter": branch_filter} + """ % 
{"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["node.uuid", "value_relationship"] diff --git a/backend/infrahub/core/validators/attribute/regex.py b/backend/infrahub/core/validators/attribute/regex.py index 008409050b..282b158b35 100644 --- a/backend/infrahub/core/validators/attribute/regex.py +++ b/backend/infrahub/core/validators/attribute/regex.py @@ -22,13 +22,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["attr_value_regex"] = self.attribute_schema.regex self.params["null_value"] = NULL_VALUE query = """ - MATCH p = (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH p = (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue) @@ -45,7 +43,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WHERE all(r in relationships(full_path) WHERE r.status = "active") AND attribute_value <> $null_value AND NOT attribute_value =~ $attr_value_regex - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["node.uuid", "attribute_value", "value_relationship"] diff --git a/backend/infrahub/core/validators/determiner.py b/backend/infrahub/core/validators/determiner.py index 2b5c4566d1..82022d2d35 100644 --- a/backend/infrahub/core/validators/determiner.py +++ b/backend/infrahub/core/validators/determiner.py @@ -1,11 +1,8 @@ -from collections import defaultdict from typing import Union -from infrahub_sdk.diff import NodeDiff - from infrahub.core.constants import RelationshipKind, 
SchemaPathType from infrahub.core.constants.schema import UpdateSupport -from infrahub.core.diff.model.diff import DiffElementType +from infrahub.core.diff.model.path import NodeDiffFieldSummary from infrahub.core.models import SchemaUpdateConstraintInfo from infrahub.core.path import SchemaPath from infrahub.core.schema import AttributeSchema, MainSchemaTypes @@ -20,43 +17,30 @@ class ConstraintValidatorDeterminer: def __init__(self, schema_branch: SchemaBranch) -> None: self.schema_branch = schema_branch - self._node_diffs_by_kind: dict[str, NodeDiff] = defaultdict(list) - self._attribute_element_map: dict[str, dict[str, list[NodeDiff]]] = {} - self._relationship_element_map: dict[str, dict[str, list[NodeDiff]]] = {} + self._node_kinds: set[str] = set() + self._attribute_element_map: dict[str, set[str]] = {} + self._relationship_element_map: dict[str, set[str]] = {} - def _index_node_diffs(self, node_diffs: list[NodeDiff]) -> None: + def _index_node_diffs(self, node_diffs: list[NodeDiffFieldSummary]) -> None: for node_diff in node_diffs: - node_kind = node_diff["kind"] - self._node_diffs_by_kind[node_kind].append(node_diff) - if node_kind not in self._attribute_element_map: - self._attribute_element_map[node_kind] = {} - for element in node_diff["elements"]: - element_name = element["name"] - element_type = element["element_type"] - if element_type.lower() in ( - DiffElementType.RELATIONSHIP_MANY.value.lower(), - DiffElementType.RELATIONSHIP_ONE.value.lower(), - ): - if node_kind not in self._relationship_element_map: - self._relationship_element_map[node_kind] = {} - if element_name not in self._relationship_element_map[node_kind]: - self._relationship_element_map[node_kind][element_name] = [] - self._relationship_element_map[node_kind][element_name].append(element) - elif element_type.lower() in (DiffElementType.ATTRIBUTE.value.lower(),): - if node_kind not in self._attribute_element_map: - self._attribute_element_map[node_kind] = {} - if element_name not in 
self._attribute_element_map[node_kind]: - self._attribute_element_map[node_kind][element_name] = [] - self._attribute_element_map[node_kind][element_name].append(element) - - def _get_attribute_diffs(self, kind: str, name: str) -> list[NodeDiff]: - return self._attribute_element_map.get(kind, {}).get(name, []) - - def _get_relationship_diffs(self, kind: str, name: str) -> list[NodeDiff]: - return self._relationship_element_map.get(kind, {}).get(name, []) + self._node_kinds.add(node_diff.kind) + if node_diff.kind not in self._attribute_element_map: + self._attribute_element_map[node_diff.kind] = set() + for attribute_name in node_diff.attribute_names: + self._attribute_element_map[node_diff.kind].add(attribute_name) + if node_diff.kind not in self._relationship_element_map: + self._relationship_element_map[node_diff.kind] = set() + for relationship_name in node_diff.relationship_names: + self._relationship_element_map[node_diff.kind].add(relationship_name) + + def _has_attribute_diff(self, kind: str, name: str) -> bool: + return name in self._attribute_element_map.get(kind, set()) + + def _has_relationship_diff(self, kind: str, name: str) -> bool: + return name in self._relationship_element_map.get(kind, set()) async def get_constraints( - self, node_diffs: list[NodeDiff], filter_invalid: bool = True + self, node_diffs: list[NodeDiffFieldSummary], filter_invalid: bool = True ) -> list[SchemaUpdateConstraintInfo]: self._index_node_diffs(node_diffs) constraints: list[SchemaUpdateConstraintInfo] = [] @@ -65,7 +49,7 @@ async def get_constraints( constraints.extend(await self._get_all_property_constraints()) - for kind in self._node_diffs_by_kind.keys(): + for kind in self._node_kinds: schema = self.schema_branch.get(name=kind, duplicate=False) constraints.extend(await self._get_constraints_for_one_schema(schema)) @@ -132,11 +116,9 @@ async def _get_attribute_constraints_for_one_schema( ) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] 
= [] for field_name in schema.attribute_names: - node_diffs_for_attribute = self._get_attribute_diffs(kind=schema.kind, name=field_name) - if not node_diffs_for_attribute: - continue - field = schema.get_attribute(field_name) - constraints.extend(await self._get_constraints_for_one_field(schema=schema, field=field)) + if self._has_attribute_diff(kind=schema.kind, name=field_name): + field = schema.get_attribute(field_name) + constraints.extend(await self._get_constraints_for_one_field(schema=schema, field=field)) return constraints async def _get_relationship_constraints_for_one_schema( @@ -144,11 +126,9 @@ async def _get_relationship_constraints_for_one_schema( ) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] = [] for field_name in schema.relationship_names: - node_diffs_for_relationship = self._get_relationship_diffs(kind=schema.kind, name=field_name) - if not node_diffs_for_relationship: - continue - field = schema.get_relationship(field_name) - constraints.extend(await self._get_constraints_for_one_field(schema=schema, field=field)) + if self._has_relationship_diff(kind=schema.kind, name=field_name): + field = schema.get_relationship(field_name) + constraints.extend(await self._get_constraints_for_one_field(schema=schema, field=field)) return constraints async def _get_constraints_for_one_field( diff --git a/backend/infrahub/core/validators/node/hierarchy.py b/backend/infrahub/core/validators/node/hierarchy.py index a95a7c8f66..1ced350a97 100644 --- a/backend/infrahub/core/validators/node/hierarchy.py +++ b/backend/infrahub/core/validators/node/hierarchy.py @@ -52,7 +52,6 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind if hierarchy := getattr(self.node_schema, "hierarchy", None): 
self.params["hierarchy_kind"] = hierarchy else: @@ -61,8 +60,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No # ruff: noqa: E501 query = """ - MATCH (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rroot:IS_PART_OF]-(n) @@ -117,7 +115,12 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No any(r in relationships(current_path) WHERE r.hierarchy <> $hierarchy_kind) OR NOT ($peer_kind IN labels(current_peer)) ) - """ % {"branch_filter": branch_filter, "to_children": to_children, "to_parent": to_parent} + """ % { + "branch_filter": branch_filter, + "to_children": to_children, + "to_parent": to_parent, + "node_kind": self.node_schema.kind, + } self.add_to_query(query) self.return_labels = ["start_node.uuid", "branch_name", "current_peer.uuid"] diff --git a/backend/infrahub/core/validators/relationship/count.py b/backend/infrahub/core/validators/relationship/count.py index 85ab9f1d29..08044413c4 100644 --- a/backend/infrahub/core/validators/relationship/count.py +++ b/backend/infrahub/core/validators/relationship/count.py @@ -34,20 +34,22 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["relationship_id"] = self.relationship_schema.identifier + self.params["relationship_direction"] = self.relationship_schema.direction.value self.params["min_count"] = ( self.min_count_override if self.min_count_override is not None else self.relationship_schema.min_count ) - self.params["max_count"] = ( - self.max_count_override if self.max_count_override is not None else self.relationship_schema.max_count - ) + max_count: int | None = self.relationship_schema.max_count + if self.max_count_override: + max_count = 
self.max_count_override + if max_count == 0: + max_count = None + self.params["max_count"] = max_count # ruff: noqa: E501 query = """ // get the nodes on these branches nodes - MATCH (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rroot:IS_PART_OF]-(n) @@ -64,7 +66,9 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No CALL { WITH active_node MATCH path = (active_node)-[rrel1:IS_RELATED]-(rel:Relationship { name: $relationship_id })-[rrel2:IS_RELATED]-(peer:Node) - WHERE all( + WHERE ($relationship_direction <> "outbound" OR (startNode(rrel1) = active_node AND startNode(rrel2) = rel)) + AND ($relationship_direction <> "inbound" OR (startNode(rrel1) = rel AND startNode(rrel2) = peer)) + AND all( r in relationships(path) WHERE (%(branch_filter)s) ) @@ -115,7 +119,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No } // return a row for each node-branch combination with a count for that branch UNWIND violation_branches_and_counts as violation_branch_and_count - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = [ diff --git a/backend/infrahub/core/validators/relationship/optional.py b/backend/infrahub/core/validators/relationship/optional.py index 24dacb50a6..2b9c438e9f 100644 --- a/backend/infrahub/core/validators/relationship/optional.py +++ b/backend/infrahub/core/validators/relationship/optional.py @@ -24,14 +24,12 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["relationship_id"] = self.relationship_schema.identifier query = """ // Query all Active Nodes of type // and store their UUID in 
uuids_active_node - MATCH (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH (root:Root)<-[r:IS_PART_OF]-(n) @@ -45,8 +43,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WITH COLLECT(active_node.uuid) AS uuids_active_node // identifier all nodes with at least one active member for this relationship // and store their UUID in uuids_with_rel - MATCH (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n, uuids_active_node MATCH path = (n)-[r:IS_RELATED]-(:Relationship { name: $relationship_id }) @@ -58,12 +55,11 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WITH n1 as node_with_rel, r1 as r, uuids_active_node WHERE r.status = "active" WITH COLLECT(node_with_rel.uuid) AS uuids_with_rel, uuids_active_node - MATCH (n:Node)-[r:IS_PART_OF]->(:Root) - WHERE $node_kind IN LABELS(n) - AND n.uuid IN uuids_active_node + MATCH (n:%(node_kind)s)-[r:IS_PART_OF]->(:Root) + WHERE n.uuid IN uuids_active_node AND not n.uuid IN uuids_with_rel AND NOT exists((n)-[:IS_RELATED]-(:Relationship { name: $relationship_id })) - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["n.uuid", "r as root_relationship"] diff --git a/backend/infrahub/core/validators/relationship/peer.py b/backend/infrahub/core/validators/relationship/peer.py index 7ff63d069f..c21d388de0 100644 --- a/backend/infrahub/core/validators/relationship/peer.py +++ b/backend/infrahub/core/validators/relationship/peer.py @@ -30,14 +30,12 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False) self.params.update(branch_params) - self.params["node_kind"] = self.node_schema.kind self.params["relationship_id"] = self.relationship_schema.identifier 
self.params["allowed_peer_kinds"] = allowed_peer_kinds # ruff: noqa: E501 query = """ - MATCH (n:Node) - WHERE $node_kind IN LABELS(n) + MATCH (n:%(node_kind)s) CALL { WITH n MATCH path = (root:Root)<-[rroot:IS_PART_OF]-(n) @@ -85,7 +83,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No WITH start_node, current_peer, branch_name, current_path WHERE all(r in relationships(current_path) WHERE r.status = "active") AND NOT any(label IN LABELS(current_peer) WHERE label IN $allowed_peer_kinds) - """ % {"branch_filter": branch_filter} + """ % {"branch_filter": branch_filter, "node_kind": self.node_schema.kind} self.add_to_query(query) self.return_labels = ["start_node.uuid", "branch_name", "current_peer.uuid"] diff --git a/backend/infrahub/core/validators/tasks.py b/backend/infrahub/core/validators/tasks.py index 8ba163eaf0..e80055a745 100644 --- a/backend/infrahub/core/validators/tasks.py +++ b/backend/infrahub/core/validators/tasks.py @@ -8,15 +8,17 @@ ) from infrahub.message_bus.operations.schema.validator import schema_path_validate from infrahub.services import services +from infrahub.workflows.utils import add_branch_tag from .models.validate_migration import SchemaValidateMigrationData # noqa: TCH001 -@flow +@flow(name="schema-migrations-validate") async def schema_validate_migrations(message: SchemaValidateMigrationData) -> list[str]: batch = InfrahubBatch(return_exceptions=True) error_messages: list[str] = [] service = services.service + await add_branch_tag(branch_name=message.branch.name) if not message.constraints: return error_messages diff --git a/backend/infrahub/core/validators/uniqueness/query.py b/backend/infrahub/core/validators/uniqueness/query.py index c7ab8fbf11..388c561d45 100644 --- a/backend/infrahub/core/validators/uniqueness/query.py +++ b/backend/infrahub/core/validators/uniqueness/query.py @@ -26,6 +26,9 @@ def __init__( self.min_count_required = min_count_required super().__init__(**kwargs) + def 
get_context(self) -> dict[str, str]: + return {"kind": self.query_request.kind} + async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False) self.params.update(branch_params) diff --git a/backend/infrahub/database/__init__.py b/backend/infrahub/database/__init__.py index 6596d7e29f..86a7ec5b5b 100644 --- a/backend/infrahub/database/__init__.py +++ b/backend/infrahub/database/__init__.py @@ -297,31 +297,57 @@ async def close(self) -> None: await self._driver.close() async def execute_query( - self, query: str, params: Optional[dict[str, Any]] = None, name: Optional[str] = "undefined" + self, + query: str, + params: dict[str, Any] | None = None, + name: str = "undefined", + context: dict[str, str] | None = None, ) -> list[Record]: - results, _ = await self.execute_query_with_metadata(query=query, params=params, name=name) + results, _ = await self.execute_query_with_metadata(query=query, params=params, name=name, context=context) return results async def execute_query_with_metadata( - self, query: str, params: Optional[dict[str, Any]] = None, name: Optional[str] = "undefined" + self, + query: str, + params: dict[str, Any] | None = None, + name: str = "undefined", + context: dict[str, str] | None = None, ) -> tuple[list[Record], dict[str, Any]]: with trace.get_tracer(__name__).start_as_current_span("execute_db_query_with_metadata") as span: span.set_attribute("query", query) if name: span.set_attribute("query_name", name) + runtime = Neo4jRuntime.UNDEFINED + try: query_config = self.queries_names_to_config[name] if self.db_type == DatabaseType.NEO4J: runtime = self.queries_names_to_config[name].neo4j_runtime - if runtime != Neo4jRuntime.DEFAULT: + if runtime not in [Neo4jRuntime.DEFAULT, Neo4jRuntime.UNDEFINED]: query = f"CYPHER runtime = {runtime.value}\n" + query if query_config.profile_memory: query = "PROFILE\n" + query except KeyError: pass # 
No specific config for this query - with QUERY_EXECUTION_METRICS.labels(self._session_mode.value, name).time(): + labels = { + "type": self._session_mode.value, + "query": name, + "runtime": runtime.value, + "context1": "", + "context2": "", + } + if context: + labels.update( + { + f"context{idx + 1}": f"{key}__{value}" + for idx, (key, value) in enumerate(context.items()) + if idx <= 1 + } + ) + + with QUERY_EXECUTION_METRICS.labels(**labels).time(): response = await self.run_query(query=query, params=params, name=name) results = [item async for item in response] return results, response._metadata or {} diff --git a/backend/infrahub/database/constants.py b/backend/infrahub/database/constants.py index dea9cd05e0..42897163a0 100644 --- a/backend/infrahub/database/constants.py +++ b/backend/infrahub/database/constants.py @@ -12,6 +12,7 @@ class Neo4jRuntime(str, Enum): SLOTTED = "slotted" PIPELINED = "pipelined" PARALLEL = "parallel" + UNDEFINED = "undefined" class IndexType(str, Enum): diff --git a/backend/infrahub/database/metrics.py b/backend/infrahub/database/metrics.py index d7cf2d0772..d9e4ba2e2a 100644 --- a/backend/infrahub/database/metrics.py +++ b/backend/infrahub/database/metrics.py @@ -7,7 +7,7 @@ QUERY_EXECUTION_METRICS = Histogram( f"{METRIC_PREFIX}_query_execution_seconds", "Execution time to query the database", - labelnames=["type", "query"], + labelnames=["type", "query", "runtime", "context1", "context2"], buckets=[0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.1, 0.5, 1], ) diff --git a/backend/infrahub/generators/__init__.py b/backend/infrahub/generators/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/infrahub/message_bus/messages/request_generator_run.py b/backend/infrahub/generators/models.py similarity index 90% rename from backend/infrahub/message_bus/messages/request_generator_run.py rename to backend/infrahub/generators/models.py index e76d736bfe..72772e3bdb 100644 --- 
a/backend/infrahub/message_bus/messages/request_generator_run.py +++ b/backend/infrahub/generators/models.py @@ -1,12 +1,11 @@ from typing import Optional -from pydantic import Field +from pydantic import BaseModel, Field -from infrahub.message_bus import InfrahubMessage from infrahub.message_bus.types import ProposedChangeGeneratorDefinition -class RequestGeneratorRun(InfrahubMessage): +class RequestGeneratorRun(BaseModel): """Runs a generator.""" generator_definition: ProposedChangeGeneratorDefinition = Field(..., description="The Generator definition") diff --git a/backend/infrahub/message_bus/operations/requests/generator.py b/backend/infrahub/generators/tasks.py similarity index 60% rename from backend/infrahub/message_bus/operations/requests/generator.py rename to backend/infrahub/generators/tasks.py index 3466e37910..ea269cc760 100644 --- a/backend/infrahub/message_bus/operations/requests/generator.py +++ b/backend/infrahub/generators/tasks.py @@ -4,42 +4,44 @@ from infrahub_sdk.node import InfrahubNode from infrahub_sdk.protocols import CoreGeneratorInstance from infrahub_sdk.schema import InfrahubGeneratorDefinitionConfig -from prefect import flow +from prefect import flow, task from infrahub import lock from infrahub.core.constants import GeneratorInstanceStatus +from infrahub.generators.models import RequestGeneratorRun from infrahub.git.base import extract_repo_file_information from infrahub.git.repository import get_initialized_repo -from infrahub.message_bus import messages -from infrahub.services import InfrahubServices +from infrahub.services import InfrahubServices, services @flow(name="generator-run") -async def run(message: messages.RequestGeneratorRun, service: InfrahubServices) -> None: +async def run_generator(model: RequestGeneratorRun) -> None: + service = services.service + repository = await get_initialized_repo( - repository_id=message.repository_id, - name=message.repository_name, + repository_id=model.repository_id, + 
name=model.repository_name, service=service, - repository_kind=message.repository_kind, + repository_kind=model.repository_kind, ) generator_definition = InfrahubGeneratorDefinitionConfig( - name=message.generator_definition.definition_name, - class_name=message.generator_definition.class_name, - file_path=message.generator_definition.file_path, - query=message.generator_definition.query_name, - targets=message.generator_definition.group_id, - convert_query_response=message.generator_definition.convert_query_response, + name=model.generator_definition.definition_name, + class_name=model.generator_definition.class_name, + file_path=model.generator_definition.file_path, + query=model.generator_definition.query_name, + targets=model.generator_definition.group_id, + convert_query_response=model.generator_definition.convert_query_response, ) - commit_worktree = repository.get_commit_worktree(commit=message.commit) + commit_worktree = repository.get_commit_worktree(commit=model.commit) file_info = extract_repo_file_information( full_filename=os.path.join(commit_worktree.directory, generator_definition.file_path.as_posix()), repo_directory=repository.directory_root, worktree_directory=commit_worktree.directory, ) - generator_instance = await _define_instance(message=message, service=service) + generator_instance = await _define_instance(model=model, service=service) try: generator_class = generator_definition.load_class( @@ -49,8 +51,8 @@ async def run(message: messages.RequestGeneratorRun, service: InfrahubServices) generator = generator_class( query=generator_definition.query, client=service.client, - branch=message.branch_name, - params=message.variables, + branch=model.branch_name, + params=model.variables, generator_instance=generator_instance.id, convert_query_response=generator_definition.convert_query_response, infrahub_node=InfrahubNode, @@ -65,23 +67,24 @@ async def run(message: messages.RequestGeneratorRun, service: InfrahubServices) await 
generator_instance.update(do_full_update=True) -async def _define_instance(message: messages.RequestGeneratorRun, service: InfrahubServices) -> CoreGeneratorInstance: - if message.generator_instance: +@task +async def _define_instance(model: RequestGeneratorRun, service: InfrahubServices) -> CoreGeneratorInstance: + if model.generator_instance: instance = await service.client.get( - kind=CoreGeneratorInstance, id=message.generator_instance, branch=message.branch_name + kind=CoreGeneratorInstance, id=model.generator_instance, branch=model.branch_name ) instance.status.value = GeneratorInstanceStatus.PENDING.value await instance.update(do_full_update=True) else: async with lock.registry.get( - f"{message.target_id}-{message.generator_definition.definition_id}", namespace="generator" + f"{model.target_id}-{model.generator_definition.definition_id}", namespace="generator" ): instances = await service.client.filters( kind=CoreGeneratorInstance, - definition__ids=[message.generator_definition.definition_id], - object__ids=[message.target_id], - branch=message.branch_name, + definition__ids=[model.generator_definition.definition_id], + object__ids=[model.target_id], + branch=model.branch_name, ) if instances: instance = instances[0] @@ -90,12 +93,12 @@ async def _define_instance(message: messages.RequestGeneratorRun, service: Infra else: instance = await service.client.create( kind=CoreGeneratorInstance, - branch=message.branch_name, + branch=model.branch_name, data={ - "name": f"{message.generator_definition.definition_name}: {message.target_name}", + "name": f"{model.generator_definition.definition_name}: {model.target_name}", "status": GeneratorInstanceStatus.PENDING.value, - "object": message.target_id, - "definition": message.generator_definition.definition_id, + "object": model.target_id, + "definition": model.generator_definition.definition_id, }, ) await instance.save() diff --git a/backend/infrahub/git/integrator.py b/backend/infrahub/git/integrator.py index 
4870dd49f4..234b8b8aed 100644 --- a/backend/infrahub/git/integrator.py +++ b/backend/infrahub/git/integrator.py @@ -47,6 +47,7 @@ from infrahub_sdk.schema import InfrahubRepositoryArtifactDefinitionConfig from infrahub_sdk.transforms import InfrahubTransform + from infrahub.git.models import RequestArtifactGenerate from infrahub.message_bus import messages # pylint: disable=too-many-lines @@ -1228,11 +1229,9 @@ async def execute_python_transform( module = importlib.import_module(file_info.module_name) - transform_class: InfrahubTransform = getattr(module, class_name) + transform_class: type[InfrahubTransform] = getattr(module, class_name) - transform = await transform_class.init( - root_directory=commit_worktree.directory, branch=branch_name, client=client - ) + transform = transform_class(root_directory=commit_worktree.directory, branch=branch_name, client=client) return await transform.run(data=data) except ModuleNotFoundError as exc: @@ -1313,7 +1312,7 @@ async def artifact_generate( return ArtifactGenerateResult(changed=True, checksum=checksum, storage_id=storage_id, artifact_id=artifact.id) async def render_artifact( - self, artifact: CoreArtifact, message: Union[messages.CheckArtifactCreate, messages.RequestArtifactGenerate] + self, artifact: CoreArtifact, message: Union[messages.CheckArtifactCreate, RequestArtifactGenerate] ) -> ArtifactGenerateResult: response = await self.sdk.query_gql_query( name=message.query, diff --git a/backend/infrahub/message_bus/messages/request_artifact_generate.py b/backend/infrahub/git/models.py similarity index 72% rename from backend/infrahub/message_bus/messages/request_artifact_generate.py rename to backend/infrahub/git/models.py index 048c37057f..11d566414b 100644 --- a/backend/infrahub/message_bus/messages/request_artifact_generate.py +++ b/backend/infrahub/git/models.py @@ -1,11 +1,20 @@ from typing import Optional -from pydantic import Field +from pydantic import BaseModel, Field -from infrahub.message_bus import 
InfrahubMessage +class RequestArtifactDefinitionGenerate(BaseModel): + """Sent to trigger the generation of artifacts for a given branch.""" -class RequestArtifactGenerate(InfrahubMessage): + artifact_definition: str = Field(..., description="The unique ID of the Artifact Definition") + branch: str = Field(..., description="The branch to target") + limit: list[str] = Field( + default_factory=list, + description="List of targets to limit the scope of the generation, if populated only the included artifacts will be regenerated", + ) + + +class RequestArtifactGenerate(BaseModel): """Runs to generate an artifact""" artifact_name: str = Field(..., description="Name of the artifact") diff --git a/backend/infrahub/git/tasks.py b/backend/infrahub/git/tasks.py index b91fa120a5..eb33aa7c43 100644 --- a/backend/infrahub/git/tasks.py +++ b/backend/infrahub/git/tasks.py @@ -8,13 +8,22 @@ from infrahub.exceptions import RepositoryError from infrahub.services import services -from .repository import InfrahubRepository +from ..log import get_logger +from ..tasks.artifact import define_artifact +from ..workflows.catalogue import REQUEST_ARTIFACT_DEFINITION_GENERATE, REQUEST_ARTIFACT_GENERATE +from ..workflows.utils import add_branch_tag +from .models import RequestArtifactDefinitionGenerate, RequestArtifactGenerate +from .repository import InfrahubRepository, get_initialized_repo + +log = get_logger() @flow(name="git-repositories-branch-create") async def create_branch(branch: str, branch_id: str) -> None: """Request to the creation of git branches in available repositories.""" service = services.service + await add_branch_tag(branch_name=branch) + repositories: list[CoreRepository] = await service.client.filters(kind=CoreRepository) batch = await service.client.create_batch() @@ -108,3 +117,120 @@ async def git_branch_create( repo = await InfrahubRepository.init(id=repository_id, name=repository_name, client=client) async with lock.registry.get(name=repository_name, 
namespace="repository"): await repo.create_branch_in_git(branch_name=branch, branch_id=branch_id) + + +@flow(name="artifact-definition-generate") +async def generate_artifact_definition(branch: str) -> None: + service = services.service + artifact_definitions = await service.client.all(kind=InfrahubKind.ARTIFACTDEFINITION, branch=branch, include=["id"]) + + for artifact_definition in artifact_definitions: + model = RequestArtifactDefinitionGenerate(branch=branch, artifact_definition=artifact_definition.id) + await service.workflow.submit_workflow( + workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, parameters={"model": model} + ) + + +@flow(name="artifact-generate") +async def generate_artifact(model: RequestArtifactGenerate) -> None: + log.debug("Generating artifact", message=model) + + service = services.service + + repo = await get_initialized_repo( + repository_id=model.repository_id, + name=model.repository_name, + service=service, + repository_kind=model.repository_kind, + ) + + artifact = await define_artifact(message=model, service=service) + + try: + result = await repo.render_artifact(artifact=artifact, message=model) + log.debug( + "Generated artifact", + name=model.artifact_name, + changed=result.changed, + checksum=result.checksum, + artifact_id=result.artifact_id, + storage_id=result.storage_id, + ) + except Exception as exc: # pylint: disable=broad-except + log.exception("Failed to generate artifact", error=exc) + artifact.status.value = "Error" + await artifact.save() + + +@flow(name="artifact-definition-generate") +async def generate_request_artifact_definition(model: RequestArtifactDefinitionGenerate) -> None: + await add_branch_tag(branch_name=model.branch) + + service = services.service + artifact_definition = await service.client.get( + kind=InfrahubKind.ARTIFACTDEFINITION, id=model.artifact_definition, branch=model.branch + ) + + await artifact_definition.targets.fetch() + group = artifact_definition.targets.peer + await group.members.fetch() + + 
existing_artifacts = await service.client.filters( + kind=InfrahubKind.ARTIFACT, + definition__ids=[model.artifact_definition], + include=["object"], + branch=model.branch, + ) + artifacts_by_member = {} + for artifact in existing_artifacts: + artifacts_by_member[artifact.object.peer.id] = artifact.id + + await artifact_definition.transformation.fetch() + transformation_repository = artifact_definition.transformation.peer.repository + + await transformation_repository.fetch() + + transform = artifact_definition.transformation.peer + await transform.query.fetch() + query = transform.query.peer + repository = transformation_repository.peer + branch = await service.client.branch.get(branch_name=model.branch) + if branch.sync_with_git: + repository = await service.client.get( + kind=InfrahubKind.GENERICREPOSITORY, id=repository.id, branch=model.branch, fragment=True + ) + transform_location = "" + + if transform.typename == InfrahubKind.TRANSFORMJINJA2: + transform_location = transform.template_path.value + elif transform.typename == InfrahubKind.TRANSFORMPYTHON: + transform_location = f"{transform.file_path.value}::{transform.class_name.value}" + + for relationship in group.members.peers: + member = relationship.peer + artifact_id = artifacts_by_member.get(member.id) + if model.limit and artifact_id not in model.limit: + continue + + request_artifact_generate_model = RequestArtifactGenerate( + artifact_name=artifact_definition.name.value, + artifact_id=artifact_id, + artifact_definition=model.artifact_definition, + commit=repository.commit.value, + content_type=artifact_definition.content_type.value, + transform_type=transform.typename, + transform_location=transform_location, + repository_id=repository.id, + repository_name=repository.name.value, + repository_kind=repository.get_kind(), + branch_name=model.branch, + query=query.name.value, + variables=member.extract(params=artifact_definition.parameters.value), + target_id=member.id, + 
target_name=member.display_label, + timeout=transform.timeout.value, + ) + + await service.workflow.submit_workflow( + workflow=REQUEST_ARTIFACT_GENERATE, parameters={"model": request_artifact_generate_model} + ) diff --git a/backend/infrahub/graphql/api/dependencies.py b/backend/infrahub/graphql/api/dependencies.py index 7bd391d131..1d6308dff8 100644 --- a/backend/infrahub/graphql/api/dependencies.py +++ b/backend/infrahub/graphql/api/dependencies.py @@ -6,7 +6,6 @@ from ..auth.query_permission_checker.anonymous_checker import AnonymousGraphQLPermissionChecker from ..auth.query_permission_checker.checker import GraphQLQueryPermissionChecker from ..auth.query_permission_checker.default_branch_checker import DefaultBranchPermissionChecker -from ..auth.query_permission_checker.default_checker import DefaultGraphQLPermissionChecker from ..auth.query_permission_checker.merge_operation_checker import MergeBranchPermissionChecker from ..auth.query_permission_checker.object_permission_checker import ( AccountManagerPermissionChecker, @@ -25,6 +24,7 @@ def get_anonymous_access_setting() -> bool: def build_graphql_query_permission_checker() -> GraphQLQueryPermissionChecker: return GraphQLQueryPermissionChecker( [ + AnonymousGraphQLPermissionChecker(get_anonymous_access_setting), # This checker never raises, it either terminates the checker chains (user is super admin) or go to the next one SuperAdminPermissionChecker(), DefaultBranchPermissionChecker(), @@ -34,8 +34,6 @@ def build_graphql_query_permission_checker() -> GraphQLQueryPermissionChecker: ObjectPermissionChecker(), ReadWriteGraphQLPermissionChecker(), # Deprecated, will be replace by either a global permission or object permissions ReadOnlyGraphQLPermissionChecker(), # Deprecated, will be replace by either a global permission or object permissions - AnonymousGraphQLPermissionChecker(get_anonymous_access_setting), - DefaultGraphQLPermissionChecker(), ] ) diff --git a/backend/infrahub/graphql/api/endpoints.py 
b/backend/infrahub/graphql/api/endpoints.py index f96e727afe..c7cd4d52bd 100644 --- a/backend/infrahub/graphql/api/endpoints.py +++ b/backend/infrahub/graphql/api/endpoints.py @@ -6,6 +6,7 @@ from infrahub.api.dependencies import get_branch_dep from infrahub.core import registry from infrahub.core.branch import Branch +from infrahub.graphql.manager import GraphQLSchemaManager from .dependencies import build_graphql_app @@ -21,7 +22,7 @@ @router.get("/schema.graphql", include_in_schema=False) async def get_graphql_schema(branch: Branch = Depends(get_branch_dep)) -> PlainTextResponse: - schema = registry.schema.get_schema_branch(name=branch.name) - gql_schema = schema.get_graphql_schema() - - return PlainTextResponse(content=print_schema(gql_schema)) + schema_branch = registry.schema.get_schema_branch(name=branch.name) + gqlm = GraphQLSchemaManager.get_manager_for_branch(branch=branch, schema_branch=schema_branch) + graphql_schema = gqlm.get_graphql_schema() + return PlainTextResponse(content=print_schema(graphql_schema)) diff --git a/backend/infrahub/graphql/auth/query_permission_checker/anonymous_checker.py b/backend/infrahub/graphql/auth/query_permission_checker/anonymous_checker.py index 254a5d3a90..27086f4d26 100644 --- a/backend/infrahub/graphql/auth/query_permission_checker/anonymous_checker.py +++ b/backend/infrahub/graphql/auth/query_permission_checker/anonymous_checker.py @@ -25,6 +25,6 @@ async def check( query_parameters: GraphqlParams, branch: Branch, ) -> CheckerResolution: - if self.anonymous_access_allowed_func() and not analyzed_query.contains_mutation: - return CheckerResolution.TERMINATE - raise AuthorizationError("Authentication is required to perform this operation") + if not self.anonymous_access_allowed_func() or analyzed_query.contains_mutation: + raise AuthorizationError("Authentication is required to perform this operation") + return CheckerResolution.NEXT_CHECKER diff --git 
a/backend/infrahub/graphql/auth/query_permission_checker/default_branch_checker.py b/backend/infrahub/graphql/auth/query_permission_checker/default_branch_checker.py index 883e6e6720..9219a89f97 100644 --- a/backend/infrahub/graphql/auth/query_permission_checker/default_branch_checker.py +++ b/backend/infrahub/graphql/auth/query_permission_checker/default_branch_checker.py @@ -1,3 +1,4 @@ +from infrahub import config from infrahub.auth import AccountSession from infrahub.core import registry from infrahub.core.account import GlobalPermission @@ -15,7 +16,7 @@ class DefaultBranchPermissionChecker(GraphQLQueryPermissionCheckerInterface): """Checker that makes sure a user account can edit data in the default branch.""" permission_required = GlobalPermission( - id="", name="", action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value ) exempt_operations = [ "BranchCreate", @@ -26,7 +27,7 @@ class DefaultBranchPermissionChecker(GraphQLQueryPermissionCheckerInterface): ] async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -39,7 +40,7 @@ async def check( can_edit_default_branch = False for permission_backend in registry.permission_backends: can_edit_default_branch = await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=self.permission_required, branch=branch + db=db, account_session=account_session, permission=self.permission_required, branch=branch ) if can_edit_default_branch: break diff --git a/backend/infrahub/graphql/auth/query_permission_checker/default_checker.py b/backend/infrahub/graphql/auth/query_permission_checker/default_checker.py deleted file mode 100644 index 
3d156f9723..0000000000 --- a/backend/infrahub/graphql/auth/query_permission_checker/default_checker.py +++ /dev/null @@ -1,23 +0,0 @@ -from infrahub.auth import AccountSession -from infrahub.core.branch import Branch -from infrahub.database import InfrahubDatabase -from infrahub.exceptions import AuthorizationError -from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer -from infrahub.graphql.initialization import GraphqlParams - -from .interface import CheckerResolution, GraphQLQueryPermissionCheckerInterface - - -class DefaultGraphQLPermissionChecker(GraphQLQueryPermissionCheckerInterface): - async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return True - - async def check( - self, - db: InfrahubDatabase, - account_session: AccountSession, - analyzed_query: InfrahubGraphQLQueryAnalyzer, - query_parameters: GraphqlParams, - branch: Branch, - ) -> CheckerResolution: - raise AuthorizationError("Authentication is required to perform this operation") diff --git a/backend/infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py b/backend/infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py index 878bf8840b..a8de539292 100644 --- a/backend/infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py +++ b/backend/infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py @@ -1,3 +1,4 @@ +from infrahub import config from infrahub.auth import AccountSession from infrahub.core import registry from infrahub.core.account import GlobalPermission @@ -15,11 +16,11 @@ class MergeBranchPermissionChecker(GraphQLQueryPermissionCheckerInterface): """Checker that makes sure a user account can merge a branch without going through a proposed change.""" permission_required = GlobalPermission( - id="", name="", action=GlobalPermissions.MERGE_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.MERGE_BRANCH.value, 
decision=PermissionDecision.ALLOW_ALL.value ) async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -33,7 +34,7 @@ async def check( can_merge_branch = False for permission_backend in registry.permission_backends: can_merge_branch = await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=self.permission_required, branch=branch + db=db, account_session=account_session, permission=self.permission_required, branch=branch ) if can_merge_branch: break diff --git a/backend/infrahub/graphql/auth/query_permission_checker/object_permission_checker.py b/backend/infrahub/graphql/auth/query_permission_checker/object_permission_checker.py index 7fee47e3c8..514a284793 100644 --- a/backend/infrahub/graphql/auth/query_permission_checker/object_permission_checker.py +++ b/backend/infrahub/graphql/auth/query_permission_checker/object_permission_checker.py @@ -1,3 +1,4 @@ +from infrahub import config from infrahub.auth import AccountSession from infrahub.core import registry from infrahub.core.account import GlobalPermission, ObjectPermission @@ -19,7 +20,7 @@ class ObjectPermissionChecker(GraphQLQueryPermissionCheckerInterface): """Checker that makes sure a user account can perform some action on some kind of objects.""" async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -61,7 +62,6 @@ async def check( extracted_words = extract_camelcase_words(kind) permissions.append( ObjectPermission( - id="", namespace=extracted_words[0], name="".join(extracted_words[1:]), action=action.lower(), @@ -73,12 +73,12 @@ async def check( has_permission = False 
for permission_backend in registry.permission_backends: has_permission = await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=permission, branch=branch + db=db, account_session=account_session, permission=permission, branch=branch ) if not has_permission: raise PermissionDeniedError(f"You do not have the following permission: {permission}") - return CheckerResolution.NEXT_CHECKER + return CheckerResolution.TERMINATE class AccountManagerPermissionChecker(GraphQLQueryPermissionCheckerInterface): @@ -88,11 +88,11 @@ class AccountManagerPermissionChecker(GraphQLQueryPermissionCheckerInterface): """ permission_required = GlobalPermission( - id="", name="", action=GlobalPermissions.MANAGE_ACCOUNTS.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.MANAGE_ACCOUNTS.value, decision=PermissionDecision.ALLOW_ALL.value ) async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -122,7 +122,7 @@ async def check( has_permission = False for permission_backend in registry.permission_backends: if has_permission := await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=self.permission_required, branch=branch + db=db, account_session=account_session, permission=self.permission_required, branch=branch ): break @@ -139,11 +139,11 @@ class PermissionManagerPermissionChecker(GraphQLQueryPermissionCheckerInterface) """ permission_required = GlobalPermission( - id="", name="", action=GlobalPermissions.MANAGE_PERMISSIONS.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.MANAGE_PERMISSIONS.value, decision=PermissionDecision.ALLOW_ALL.value ) async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return 
account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -170,7 +170,7 @@ async def check( for permission_backend in registry.permission_backends: if not await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=self.permission_required, branch=branch + db=db, account_session=account_session, permission=self.permission_required, branch=branch ): raise PermissionDeniedError("You do not have the permission to manage permissions") @@ -184,11 +184,11 @@ class RepositoryManagerPermissionChecker(GraphQLQueryPermissionCheckerInterface) """ permission_required = GlobalPermission( - id="", name="", action=GlobalPermissions.MANAGE_REPOSITORIES.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.MANAGE_REPOSITORIES.value, decision=PermissionDecision.ALLOW_ALL.value ) async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -215,7 +215,7 @@ async def check( for permission_backend in registry.permission_backends: if not await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=self.permission_required, branch=branch + db=db, account_session=account_session, permission=self.permission_required, branch=branch ): raise PermissionDeniedError("You do not have the permission to manage repositories") diff --git a/backend/infrahub/graphql/auth/query_permission_checker/super_admin_checker.py b/backend/infrahub/graphql/auth/query_permission_checker/super_admin_checker.py index 9871f2ade9..216607fcb8 100644 --- a/backend/infrahub/graphql/auth/query_permission_checker/super_admin_checker.py +++ b/backend/infrahub/graphql/auth/query_permission_checker/super_admin_checker.py @@ -1,3 +1,4 @@ +from infrahub 
import config from infrahub.auth import AccountSession from infrahub.core import registry from infrahub.core.account import GlobalPermission @@ -14,11 +15,11 @@ class SuperAdminPermissionChecker(GraphQLQueryPermissionCheckerInterface): """Checker allows a user to do anything (if the checker runs first).""" permission_required = GlobalPermission( - id="", name="", action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value ) async def supports(self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch) -> bool: - return account_session.authenticated + return config.SETTINGS.main.allow_anonymous_access or account_session.authenticated async def check( self, @@ -30,7 +31,7 @@ async def check( ) -> CheckerResolution: for permission_backend in registry.permission_backends: if await permission_backend.has_permission( - db=db, account_id=account_session.account_id, permission=self.permission_required, branch=branch + db=db, account_session=account_session, permission=self.permission_required, branch=branch ): return CheckerResolution.TERMINATE diff --git a/backend/infrahub/graphql/initialization.py b/backend/infrahub/graphql/initialization.py index 757957e478..5efef8fd3a 100644 --- a/backend/infrahub/graphql/initialization.py +++ b/backend/infrahub/graphql/initialization.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Optional from starlette.background import BackgroundTasks @@ -52,21 +52,20 @@ def active_account_session(self) -> AccountSession: def prepare_graphql_params( db: InfrahubDatabase, - branch: Union[Branch, str], - at: Optional[Union[Timestamp, str]] = None, - account_session: Optional[AccountSession] = None, - request: Optional[HTTPConnection] = None, - service: Optional[InfrahubServices] = None, + 
branch: Branch | str, + at: Timestamp | str | None = None, + account_session: AccountSession | None = None, + request: HTTPConnection | None = None, + service: InfrahubServices | None = None, include_query: bool = True, include_mutation: bool = True, include_subscription: bool = True, include_types: bool = True, ) -> GraphqlParams: branch = registry.get_branch_from_registry(branch=branch) - schema = registry.schema.get_schema_branch(name=branch.name) - - gqlm = schema.get_graphql_manager() - gql_schema = schema.get_graphql_schema( + schema_branch = registry.schema.get_schema_branch(name=branch.name) + gqlm = GraphQLSchemaManager.get_manager_for_branch(branch=branch, schema_branch=schema_branch) + gql_schema = gqlm.get_graphql_schema( include_query=include_query, include_mutation=include_mutation, include_subscription=include_subscription, @@ -82,7 +81,7 @@ def prepare_graphql_params( db=db, branch=branch, at=Timestamp(at), - types=gqlm._graphql_types, + types=gqlm.get_graphql_types(), related_node_ids=set(), background=BackgroundTasks(), request=request, @@ -90,21 +89,3 @@ def prepare_graphql_params( account_session=account_session, ), ) - - -def generate_graphql_schema( - db: InfrahubDatabase, # pylint: disable=unused-argument - branch: Union[Branch, str], - include_query: bool = True, - include_mutation: bool = True, - include_subscription: bool = True, - include_types: bool = True, -) -> GraphQLSchema: - branch = registry.get_branch_from_registry(branch) - schema = registry.schema.get_schema_branch(name=branch.name) - return GraphQLSchemaManager(schema=schema).generate( - include_query=include_query, - include_mutation=include_mutation, - include_subscription=include_subscription, - include_types=include_types, - ) diff --git a/backend/infrahub/graphql/manager.py b/backend/infrahub/graphql/manager.py index 3ae7c525db..6d5599580f 100644 --- a/backend/infrahub/graphql/manager.py +++ b/backend/infrahub/graphql/manager.py @@ -16,6 +16,7 @@ ProfileSchema, 
RelationshipSchema, ) +from infrahub.core.timestamp import Timestamp from infrahub.graphql.mutations.attribute import BaseAttributeCreate, BaseAttributeUpdate from infrahub.graphql.mutations.graphql_query import InfrahubGraphQLQueryMutation from infrahub.types import ATTRIBUTE_TYPES, InfrahubDataType, get_attribute_type @@ -67,8 +68,11 @@ if TYPE_CHECKING: from graphql import GraphQLSchema + from infrahub.core.branch import Branch from infrahub.core.schema.schema_branch import SchemaBranch +# pylint: disable=redefined-builtin,c-extension-no-member,too-many-lines,too-many-public-methods + class DeleteInput(graphene.InputObjectType): id = graphene.String(required=False) @@ -94,16 +98,71 @@ def get_attr_kind(node_schema: MainSchemaTypes, attr_schema: AttributeSchema) -> return get_enum_attribute_type_name(node_schema=node_schema, attr_schema=attr_schema) +@dataclass +class BranchDetails: + branch_name: str + schema_changed_at: Timestamp + schema_hash: str + gql_manager: GraphQLSchemaManager + + class GraphQLSchemaManager: # pylint: disable=too-many-public-methods _extra_types: dict[str, GraphQLTypes] = { "DiffSummaryElementAttribute": DiffSummaryElementAttribute, "DiffSummaryElementRelationshipOne": DiffSummaryElementRelationshipOne, "DiffSummaryElementRelationshipMany": DiffSummaryElementRelationshipMany, } + _branch_details_by_name: dict[str, BranchDetails] = {} + + @classmethod + def clear_cache(cls) -> None: + cls._branch_details_by_name = {} + + @classmethod + def _cache_branch( + cls, branch: Branch, schema_branch: SchemaBranch, schema_hash: str | None = None + ) -> BranchDetails: + if not schema_hash: + if branch.schema_hash: + schema_hash = branch.schema_hash.main + else: + schema_hash = schema_branch.get_hash() + branch_details = BranchDetails( + branch_name=branch.name, + schema_changed_at=Timestamp(branch.schema_changed_at) if branch.schema_changed_at else Timestamp(), + schema_hash=schema_hash, + gql_manager=cls(schema=schema_branch), + ) + 
cls._branch_details_by_name[branch.name] = branch_details + return branch_details + + @classmethod + def get_manager_for_branch(cls, branch: Branch, schema_branch: SchemaBranch) -> GraphQLSchemaManager: + if branch.name not in cls._branch_details_by_name: + branch_details = cls._cache_branch(branch=branch, schema_branch=schema_branch) + return branch_details.gql_manager + cached_branch_details = cls._branch_details_by_name[branch.name] + # try to use the schema_changed_at time b/c it is faster than checking the hash + if branch.schema_changed_at: + changed_at_time = Timestamp(branch.schema_changed_at) + if changed_at_time > cached_branch_details.schema_changed_at: + cached_branch_details = cls._cache_branch(branch=branch, schema_branch=schema_branch) + return cached_branch_details.gql_manager + if branch.schema_hash: + current_hash = branch.active_schema_hash.main + else: + current_hash = schema_branch.get_hash() + if cached_branch_details.schema_hash != current_hash: + cached_branch_details = cls._cache_branch( + branch=branch, schema_branch=schema_branch, schema_hash=current_hash + ) + + return cached_branch_details.gql_manager def __init__(self, schema: SchemaBranch) -> None: self.schema = schema + self._full_graphql_schema: GraphQLSchema | None = None self._graphql_types: dict[str, GraphQLTypes] = {} self._load_attribute_types() @@ -111,6 +170,27 @@ def __init__(self, schema: SchemaBranch) -> None: self._load_all_enum_types(node_schemas=self.schema.get_all().values()) self._load_node_interface() + def get_graphql_types(self) -> dict[str, GraphQLTypes]: + return self._graphql_types + + def get_graphql_schema( + self, + include_query: bool = True, + include_mutation: bool = True, + include_subscription: bool = True, + include_types: bool = True, + ) -> GraphQLSchema: + if all((include_query, include_mutation, include_subscription, include_types)): + if not self._full_graphql_schema: + self._full_graphql_schema = self.generate() + return self._full_graphql_schema 
+ return self.generate( + include_query=include_query, + include_mutation=include_mutation, + include_subscription=include_subscription, + include_types=include_types, + ) + def generate( self, include_query: bool = True, @@ -128,7 +208,16 @@ def generate( query = self.get_gql_query() if include_query else None mutation = self.get_gql_mutation() if include_mutation else None - subscription = self.get_gql_subscription() if include_subscription else None + subscription = None + if include_subscription: + partial_graphene_schema = graphene.Schema( + query=query, + mutation=mutation, + types=types, + auto_camelcase=False, + directives=DIRECTIVES, + ) + subscription = self.get_gql_subscription(partial_graphql_schema=partial_graphene_schema.graphql_schema) graphene_schema = graphene.Schema( query=query, @@ -157,9 +246,9 @@ class Mutation(InfrahubBaseMutation, MutationMixin): # type: ignore return Mutation - def get_gql_subscription(self) -> type[InfrahubBaseSubscription]: + def get_gql_subscription(self, partial_graphql_schema: graphene.Schema) -> type[InfrahubBaseSubscription]: class Subscription(InfrahubBaseSubscription): - pass + graphql_schema = partial_graphql_schema return Subscription @@ -242,12 +331,12 @@ def _load_enum_type(self, node_schema: MainSchemaTypes) -> None: def _get_related_input_type(self, relationship: RelationshipSchema) -> type[RelatedNodeInput]: peer_schema = self.schema.get(name=relationship.peer, duplicate=False) if (isinstance(peer_schema, NodeSchema) and peer_schema.is_ip_prefix()) or ( - isinstance(peer_schema, GenericSchema) and InfrahubKind.IPPREFIX == relationship.peer + isinstance(peer_schema, GenericSchema) and relationship.peer == InfrahubKind.IPPREFIX ): return RelatedPrefixNodeInput if (isinstance(peer_schema, NodeSchema) and peer_schema.is_ip_address()) or ( - isinstance(peer_schema, GenericSchema) and InfrahubKind.IPADDRESS == relationship.peer + isinstance(peer_schema, GenericSchema) and relationship.peer == InfrahubKind.IPADDRESS 
): return RelatedIPAddressNodeInput diff --git a/backend/infrahub/graphql/mutations/artifact_definition.py b/backend/infrahub/graphql/mutations/artifact_definition.py index caef54be9c..ff4daa262c 100644 --- a/backend/infrahub/graphql/mutations/artifact_definition.py +++ b/backend/infrahub/graphql/mutations/artifact_definition.py @@ -6,8 +6,9 @@ from typing_extensions import Self from infrahub.core.schema import NodeSchema +from infrahub.git.models import RequestArtifactDefinitionGenerate from infrahub.log import get_logger -from infrahub.message_bus import messages +from infrahub.workflows.catalogue import REQUEST_ARTIFACT_DEFINITION_GENERATE from .main import InfrahubMutationMixin, InfrahubMutationOptions @@ -54,13 +55,11 @@ async def mutate_create( artifact_definition, result = await super().mutate_create(info=info, data=data, branch=branch, at=at) - events = [ - messages.RequestArtifactDefinitionGenerate(artifact_definition=artifact_definition.id, branch=branch.name), - ] - if context.service: - for event in events: - await context.service.send(message=event) + model = RequestArtifactDefinitionGenerate(branch=branch.name, artifact_definition=artifact_definition.id) + await context.service.workflow.submit_workflow( + workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, parameters={"model": model} + ) return artifact_definition, result @@ -78,12 +77,10 @@ async def mutate_update( artifact_definition, result = await super().mutate_update(info=info, data=data, branch=branch, at=at) - events = [ - messages.RequestArtifactDefinitionGenerate(artifact_definition=artifact_definition.id, branch=branch.name), - ] - if context.service: - for event in events: - await context.service.send(message=event) + model = RequestArtifactDefinitionGenerate(branch=branch.name, artifact_definition=artifact_definition.id) + await context.service.workflow.submit_workflow( + workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, parameters={"model": model} + ) return artifact_definition, result diff 
--git a/backend/infrahub/graphql/mutations/branch.py b/backend/infrahub/graphql/mutations/branch.py index a86c03e311..d556c1a6ca 100644 --- a/backend/infrahub/graphql/mutations/branch.py +++ b/backend/infrahub/graphql/mutations/branch.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any import pydantic from graphene import Boolean, Field, InputField, InputObjectType, List, Mutation, String @@ -8,21 +8,25 @@ from opentelemetry import trace from typing_extensions import Self -from infrahub import config, lock +from infrahub import lock from infrahub.core import registry from infrahub.core.branch import Branch from infrahub.core.diff.branch_differ import BranchDiffer -from infrahub.core.diff.ipam_diff_parser import IpamDiffParser +from infrahub.core.diff.coordinator import DiffCoordinator +from infrahub.core.diff.merger.merger import DiffMerger +from infrahub.core.diff.repository.repository import DiffRepository from infrahub.core.merge import BranchMerger -from infrahub.core.migrations.schema.runner import schema_migrations_runner from infrahub.core.task import UserTask -from infrahub.core.validators.checker import schema_validators_checker +from infrahub.core.validators.determiner import ConstraintValidatorDeterminer +from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData +from infrahub.core.validators.tasks import schema_validate_migrations from infrahub.database import retry_db_transaction +from infrahub.dependencies.registry import get_component_registry from infrahub.exceptions import BranchNotFoundError, ValidationError from infrahub.log import get_log_data, get_logger from infrahub.message_bus import Meta, messages -from infrahub.services import services from infrahub.worker import WORKER_IDENTITY +from infrahub.workflows.catalogue import BRANCH_MERGE, BRANCH_REBASE from ..types import BranchType @@ -183,85 +187,22 @@ class Arguments: 
object = Field(BranchType) @classmethod - @retry_db_transaction(name="branch_rebase") async def mutate(cls, root: dict, info: GraphQLResolveInfo, data: BranchNameInput) -> Self: context: GraphqlContext = info.context if not context.service: raise ValueError("Service must be provided to rebase a branch.") - async with UserTask.from_graphql_context(title=f"Rebase branch : {data.name}", context=context) as task: - obj = await Branch.get_by_name(db=context.db, name=str(data.name)) - merger = BranchMerger(db=context.db, source_branch=obj, service=context.service) - - # If there are some changes related to the schema between this branch and main, we need to - # - Run all the validations to ensure everything if correct before rebasing the branch - # - Run all the migrations after the rebase - if obj.has_schema_changes: - candidate_schema = merger.get_candidate_schema() - constraints = await merger.calculate_validations(target_schema=candidate_schema) - error_messages, _ = await schema_validators_checker( - branch=obj, schema=candidate_schema, constraints=constraints, service=context.service - ) - if error_messages: - raise ValidationError(",\n".join(error_messages)) - - schema_in_main_before = merger.destination_schema.duplicate() - - async with context.db.start_transaction() as dbt: - await obj.rebase(db=dbt) - await task.info(message="Branch successfully rebased", db=dbt) - - if obj.has_schema_changes: - # NOTE there is a bit additional work in order to calculate a proper diff that will - # allow us to pull only the part of the schema that has changed, for now the safest option is to pull - # Everything - # schema_diff = await merger.has_schema_changes() - updated_schema = await registry.schema.load_schema_from_db( - db=context.db, - branch=obj, - # schema=merger.source_schema.duplicate(), - # schema_diff=schema_diff, - ) - registry.schema.set_schema_branch(name=obj.name, schema=updated_schema) - obj.update_schema_hash() - await obj.save(db=context.db) - - # Execute the 
migrations - migrations = await merger.calculate_migrations(target_schema=updated_schema) - - errors = await schema_migrations_runner( - branch=merger.source_branch, - new_schema=candidate_schema, - previous_schema=schema_in_main_before, - migrations=migrations, - service=context.service, - ) - for error in errors: - context.service.log.error(error) + obj = await Branch.get_by_name(db=context.db, name=str(data.name)) - fields = await extract_fields_first_node(info=info) + await context.service.workflow.execute_workflow(workflow=BRANCH_REBASE, parameters={"branch": obj.name}) - ok = True - - log_data = get_log_data() - request_id = log_data.get("request_id", "") - differ = await merger.get_graph_diff() - diff_parser = IpamDiffParser( - db=context.db, - differ=differ, - source_branch_name=obj.name, - target_branch_name=registry.default_branch, - ) - ipam_node_details = await diff_parser.get_changed_ipam_node_details() - message = messages.EventBranchRebased( - branch=obj.name, - ipam_node_details=ipam_node_details, - meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), - ) - await context.service.send(message=message) + # Pull the latest information about the branch from the database directly + obj = await Branch.get_by_name(db=context.db, name=str(data.name)) + fields = await extract_fields_first_node(info=info) + ok = True - return cls(object=await obj.to_graphql(fields=fields.get("object", {})), ok=ok) + return cls(object=await obj.to_graphql(fields=fields.get("object", {})), ok=ok) class BranchValidate(Mutation): @@ -305,53 +246,55 @@ class Arguments: object = Field(BranchType) @classmethod - @retry_db_transaction(name="branch_merge") async def mutate(cls, root: dict, info: GraphQLResolveInfo, data: BranchNameInput) -> Self: context: GraphqlContext = info.context - async with UserTask.from_graphql_context(title=f"Merge branch: {data['name']}", context=context) as task: - obj = await Branch.get_by_name(db=context.db, name=data["name"]) - - merger: 
Optional[BranchMerger] = None - async with lock.registry.global_graph_lock(): - async with context.db.start_transaction() as db: - merger = BranchMerger(db=db, source_branch=obj, service=context.service) - await merger.merge() - await merger.update_schema() - - fields = await extract_fields(info.field_nodes[0].selection_set) + if not context.service: + raise ValueError("Service must be provided to merge a branch.") - ok = True + obj = await Branch.get_by_name(db=context.db, name=data["name"]) + base_branch = await Branch.get_by_name(db=context.db, name=registry.default_branch) + + component_registry = get_component_registry() + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=context.db, branch=obj) + diff_repository = await component_registry.get_component(DiffRepository, db=context.db, branch=obj) + diff_merger = await component_registry.get_component(DiffMerger, db=context.db, branch=obj) + enriched_diff = await diff_coordinator.update_branch_diff(base_branch=base_branch, diff_branch=obj) + if enriched_diff.get_all_conflicts(): + raise ValidationError( + f"Branch {obj.name} contains conflicts with the default branch." + " Please create a Proposed Change to resolve the conflicts or manually update them before merging." 
+ ) + node_diff_field_summaries = await diff_repository.get_node_field_summaries( + diff_branch_name=enriched_diff.diff_branch_name, diff_id=enriched_diff.uuid + ) + + merger = BranchMerger( + db=context.db, + diff_coordinator=diff_coordinator, + diff_merger=diff_merger, + source_branch=obj, + service=context.service, + ) + candidate_schema = merger.get_candidate_schema() + determiner = ConstraintValidatorDeterminer(schema_branch=candidate_schema) + constraints = await determiner.get_constraints(node_diffs=node_diff_field_summaries) + if obj.has_schema_changes: + constraints += await merger.calculate_validations(target_schema=candidate_schema) + + if constraints: + error_messages = await schema_validate_migrations( + message=SchemaValidateMigrationData(branch=obj, schema_branch=candidate_schema, constraints=constraints) + ) + if error_messages: + raise ValidationError(",\n".join(error_messages)) - if merger and merger.migrations and context.service: - errors = await schema_migrations_runner( - branch=merger.destination_branch, - new_schema=merger.destination_schema, - previous_schema=merger.initial_source_schema, - migrations=merger.migrations, - service=context.service, - ) - for error in errors: - await task.error(message=error) + await context.service.workflow.execute_workflow(workflow=BRANCH_MERGE, parameters={"branch": obj.name}) - if config.SETTINGS.broker.enable and context.background: - log_data = get_log_data() - request_id = log_data.get("request_id", "") + # Pull the latest information about the branch from the database directly + obj = await Branch.get_by_name(db=context.db, name=data["name"]) - differ = await merger.get_graph_diff() - diff_parser = IpamDiffParser( - db=context.db, - differ=differ, - source_branch_name=obj.name, - target_branch_name=registry.default_branch, - ) - ipam_node_details = await diff_parser.get_changed_ipam_node_details() - message = messages.EventBranchMerge( - source_branch=obj.name, - target_branch=registry.default_branch, 
- ipam_node_details=ipam_node_details, - meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), - ) - context.background.add_task(services.send, message) + fields = await extract_fields(info.field_nodes[0].selection_set) + ok = True - return cls(object=await obj.to_graphql(fields=fields.get("object", {})), ok=ok) + return cls(object=await obj.to_graphql(fields=fields.get("object", {})), ok=ok) diff --git a/backend/infrahub/graphql/mutations/diff.py b/backend/infrahub/graphql/mutations/diff.py index 8941b999ff..631bc3d3ca 100644 --- a/backend/infrahub/graphql/mutations/diff.py +++ b/backend/infrahub/graphql/mutations/diff.py @@ -5,8 +5,9 @@ from infrahub.core import registry from infrahub.core.diff.coordinator import DiffCoordinator +from infrahub.core.diff.models import RequestDiffUpdate from infrahub.dependencies.registry import get_component_registry -from infrahub.message_bus import messages +from infrahub.workflows.catalogue import REQUEST_DIFF_UPDATE if TYPE_CHECKING: from ..initialization import GraphqlContext @@ -55,13 +56,13 @@ async def mutate( return {"ok": True} - message = messages.RequestDiffUpdate( + model = RequestDiffUpdate( branch_name=str(data.branch), name=data.name, from_time=from_timestamp_str, to_time=to_timestamp_str, ) if context.service: - await context.service.send(message=message) + await context.service.workflow.submit_workflow(workflow=REQUEST_DIFF_UPDATE, parameters={"model": model}) return {"ok": True} diff --git a/backend/infrahub/graphql/mutations/proposed_change.py b/backend/infrahub/graphql/mutations/proposed_change.py index 35ecf7bd03..3bf8c0761e 100644 --- a/backend/infrahub/graphql/mutations/proposed_change.py +++ b/backend/infrahub/graphql/mutations/proposed_change.py @@ -3,7 +3,6 @@ from graphene import Boolean, InputObjectType, Mutation, String from graphql import GraphQLResolveInfo -from infrahub import lock from infrahub.core.account import GlobalPermission from infrahub.core.branch import Branch from 
infrahub.core.constants import ( @@ -14,10 +13,7 @@ ProposedChangeState, ValidatorConclusion, ) -from infrahub.core.diff.ipam_diff_parser import IpamDiffParser from infrahub.core.manager import NodeManager -from infrahub.core.merge import BranchMerger -from infrahub.core.migrations.schema.runner import schema_migrations_runner from infrahub.core.node import Node from infrahub.core.registry import registry from infrahub.core.schema import NodeSchema @@ -25,10 +21,8 @@ from infrahub.exceptions import BranchNotFoundError, ValidationError from infrahub.graphql.mutations.main import InfrahubMutationMixin from infrahub.graphql.types.enums import CheckType as GraphQLCheckType -from infrahub.log import get_log_data -from infrahub.message_bus import Meta, messages -from infrahub.services import services -from infrahub.worker import WORKER_IDENTITY +from infrahub.message_bus import messages +from infrahub.workflows.catalogue import BRANCH_MERGE from .main import InfrahubMutationOptions @@ -108,10 +102,8 @@ async def mutate_update( # pylint: disable=too-many-branches for permission_backend in registry.permission_backends: if has_merge_permission := await permission_backend.has_permission( db=context.db, - account_id=context.active_account_session.account_id, + account_session=context.active_account_session, permission=GlobalPermission( - id="", - name="", action=GlobalPermissions.MERGE_PROPOSED_CHANGE.value, decision=PermissionDecision.ALLOW_ALL.value, ), @@ -142,7 +134,6 @@ async def mutate_update( # pylint: disable=too-many-branches if updated_state == ProposedChangeState.MERGED and not has_merge_permission: raise ValidationError("You do not have the permission to merge proposed changes") - merger: Optional[BranchMerger] = None async with context.db.start_transaction() as dbt: proposed_change, result = await super().mutate_update( info=info, data=data, branch=branch, at=at, database=dbt, node=obj @@ -171,40 +162,10 @@ async def mutate_update( # pylint: 
disable=too-many-branches keep_source_value = check.keep_branch.value.value == "source" conflict_resolution[check.conflicts.value[0]["path"]] = keep_source_value - async with lock.registry.global_graph_lock(): - merger = BranchMerger(db=dbt, source_branch=source_branch, service=context.service) - await merger.merge(conflict_resolution=conflict_resolution) - await merger.update_schema() - - if context.background: - log_data = get_log_data() - request_id = log_data.get("request_id", "") - differ = await merger.get_graph_diff() - diff_parser = IpamDiffParser( - db=context.db, - differ=differ, - source_branch_name=obj.name, - target_branch_name=registry.default_branch, - ) - ipam_node_details = await diff_parser.get_changed_ipam_node_details() - message = messages.EventBranchMerge( - source_branch=source_branch.name, - target_branch=registry.default_branch, - ipam_node_details=ipam_node_details, - meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), - ) - context.background.add_task(services.send, message) - - if merger and merger.migrations: - errors = await schema_migrations_runner( - branch=merger.destination_branch, - new_schema=merger.destination_schema, - previous_schema=merger.initial_source_schema, - migrations=merger.migrations, - service=context.service, - ) - for error in errors: - context.service.log.error(error) + await context.service.workflow.execute_workflow( + workflow=BRANCH_MERGE, + parameters={"branch": source_branch.name, "conflict_resolution": conflict_resolution}, + ) return proposed_change, result diff --git a/backend/infrahub/graphql/queries/account.py b/backend/infrahub/graphql/queries/account.py index 89f8296a25..28e2a7faab 100644 --- a/backend/infrahub/graphql/queries/account.py +++ b/backend/infrahub/graphql/queries/account.py @@ -70,6 +70,7 @@ async def resolve_account_tokens( class AccountGlobalPermissionNode(ObjectType): id = Field(String, required=True) + description = Field(String, required=False) name = Field(String, 
required=True) action = Field(String, required=True) decision = Field(String, required=True) @@ -78,7 +79,7 @@ class AccountGlobalPermissionNode(ObjectType): class AccountObjectPermissionNode(ObjectType): id = Field(String, required=True) - branch = Field(String, required=True) + description = Field(String, required=False) namespace = Field(String, required=True) name = Field(String, required=True) action = Field(String, required=True) @@ -122,7 +123,7 @@ async def resolve_account_permissions( permissions: AssignedPermissions = {"global_permissions": [], "object_permissions": []} for permission_backend in registry.permission_backends: backend_permissions = await permission_backend.load_permissions( - db=context.db, account_id=context.account_session.account_id, branch=context.branch + db=context.db, account_session=context.account_session, branch=context.branch ) permissions["global_permissions"].extend(backend_permissions["global_permissions"]) permissions["object_permissions"].extend(backend_permissions["object_permissions"]) @@ -135,7 +136,7 @@ async def resolve_account_permissions( { "node": { "id": obj.id, - "name": obj.name, + "description": obj.description, "action": obj.action, "decision": obj.decision, "identifier": str(obj), @@ -150,6 +151,7 @@ async def resolve_account_permissions( { "node": { "id": obj.id, + "description": obj.description, "namespace": obj.namespace, "name": obj.name, "action": obj.action, diff --git a/backend/infrahub/graphql/subscription/__init__.py b/backend/infrahub/graphql/subscription/__init__.py index 8f9ed088c4..f023a34590 100644 --- a/backend/infrahub/graphql/subscription/__init__.py +++ b/backend/infrahub/graphql/subscription/__init__.py @@ -1,23 +1,37 @@ -from typing import Any, Iterable, Optional +from typing import Any, AsyncGenerator -from graphene import ObjectType +from graphene import Field, Int, ObjectType, Schema, String +from graphene.types.generic import GenericScalar from graphql import GraphQLResolveInfo -from 
.graphql_query import GraphQLQuerySubscription, resolver_graphql_query +from .graphql_query import resolver_graphql_query + +GraphQLQuerySubscription = Field( + GenericScalar(), + name=String(), + params=GenericScalar(required=False), + interval=Int(required=False), +) class InfrahubBaseSubscription(ObjectType): query = GraphQLQuerySubscription + graphql_schema: Schema | None = None - @staticmethod + @classmethod async def subscribe_query( + cls, parent: dict, # pylint: disable=unused-argument info: GraphQLResolveInfo, name: str, - params: Optional[dict[str, Any]] = None, - interval: Optional[int] = 10, - ) -> Iterable[dict]: + params: dict[str, Any] | None = None, + interval: int | None = 10, + ) -> AsyncGenerator[dict[str, Any], None]: + if not cls.graphql_schema: + raise RuntimeError("Subscription initialized without graphql schema") + if not interval: + interval = 10 async for result in resolver_graphql_query( - parent=parent, info=info, name=name, params=params, interval=interval + parent=parent, info=info, name=name, graphql_schema=cls.graphql_schema, params=params, interval=interval ): yield result diff --git a/backend/infrahub/graphql/subscription/graphql_query.py b/backend/infrahub/graphql/subscription/graphql_query.py index 20f866b4de..45b3e6745f 100644 --- a/backend/infrahub/graphql/subscription/graphql_query.py +++ b/backend/infrahub/graphql/subscription/graphql_query.py @@ -1,11 +1,9 @@ import asyncio from typing import TYPE_CHECKING, Any, AsyncGenerator -from graphene import Field, Int, String -from graphene.types.generic import GenericScalar +from graphene import Schema from graphql import GraphQLResolveInfo, graphql -from infrahub.core import registry from infrahub.core.constants import InfrahubKind from infrahub.core.manager import NodeManager from infrahub.core.protocols import CoreGraphQLQuery @@ -22,6 +20,7 @@ async def resolver_graphql_query( parent: dict, # pylint: disable=unused-argument info: GraphQLResolveInfo, name: str, + graphql_schema: 
Schema, params: dict[str, Any] | None = None, interval: int = 10, ) -> AsyncGenerator[dict[str, Any], None]: @@ -36,9 +35,6 @@ async def resolver_graphql_query( if not graphql_query: raise ValueError(f"Unable to find the {InfrahubKind.GRAPHQLQUERY} {name}") - schema_branch = registry.schema.get_schema_branch(name=context.branch.name) - graphql_schema = schema_branch.get_graphql_schema() - while True: async with context.db.start_session() as db: result = await graphql( @@ -54,11 +50,3 @@ async def resolver_graphql_query( yield result.data await asyncio.sleep(delay=float(interval)) - - -GraphQLQuerySubscription = Field( - GenericScalar(), - name=String(), - params=GenericScalar(required=False), - interval=Int(required=False), -) diff --git a/backend/infrahub/graphql/types/attribute.py b/backend/infrahub/graphql/types/attribute.py index f722fb8e91..7c9c4868bb 100644 --- a/backend/infrahub/graphql/types/attribute.py +++ b/backend/infrahub/graphql/types/attribute.py @@ -7,7 +7,7 @@ from infrahub.core import registry -from .enums import PermissionDecision +from .enums import BranchRelativePermissionDecision from .interface import InfrahubInterface @@ -57,7 +57,7 @@ class RelatedPrefixNodeInput(InputObjectType): class PermissionType(ObjectType): - update_value = Field(PermissionDecision, required=False) + update_value = Field(BranchRelativePermissionDecision, required=False) class AttributeInterface(InfrahubInterface): diff --git a/backend/infrahub/graphql/types/enums.py b/backend/infrahub/graphql/types/enums.py index df26aab8b9..0ea9d67ea8 100644 --- a/backend/infrahub/graphql/types/enums.py +++ b/backend/infrahub/graphql/types/enums.py @@ -1,9 +1,10 @@ from graphene import Enum from infrahub.core import constants +from infrahub.permissions import constants as permission_constants CheckType = Enum.from_enum(constants.CheckType) Severity = Enum.from_enum(constants.Severity) -PermissionDecision = Enum.from_enum(constants.PermissionDecision) 
+BranchRelativePermissionDecision = Enum.from_enum(permission_constants.BranchRelativePermissionDecision) diff --git a/backend/infrahub/graphql/types/permission.py b/backend/infrahub/graphql/types/permission.py index 1708ab038f..19a9d949b2 100644 --- a/backend/infrahub/graphql/types/permission.py +++ b/backend/infrahub/graphql/types/permission.py @@ -2,20 +2,30 @@ from graphene import Field, Int, List, ObjectType, String -from infrahub.graphql.types.enums import PermissionDecision +from infrahub.graphql.types.enums import BranchRelativePermissionDecision class ObjectPermission(ObjectType): kind = Field(String, required=True, description="The kind this permission refers to.") - view = Field(PermissionDecision, required=True, description="Indicates the permission level for the read action.") + view = Field( + BranchRelativePermissionDecision, + required=True, + description="Indicates the permission level for the read action.", + ) create = Field( - PermissionDecision, required=True, description="Indicates the permission level for the create action." + BranchRelativePermissionDecision, + required=True, + description="Indicates the permission level for the create action.", ) update = Field( - PermissionDecision, required=True, description="Indicates the permission level for the update action." + BranchRelativePermissionDecision, + required=True, + description="Indicates the permission level for the update action.", ) delete = Field( - PermissionDecision, required=True, description="Indicates the permission level for the delete action." 
+ BranchRelativePermissionDecision, + required=True, + description="Indicates the permission level for the delete action.", ) diff --git a/backend/infrahub/log.py b/backend/infrahub/log.py index 5e3a5cb621..1ccbf2e0d5 100644 --- a/backend/infrahub/log.py +++ b/backend/infrahub/log.py @@ -1,3 +1,4 @@ +import importlib import logging import os from typing import TYPE_CHECKING, Any @@ -29,6 +30,11 @@ def set_log_data(key: str, value: Any) -> None: def configure_logging(production: bool = True, log_level: str = "INFO") -> None: + # Importing prefect.main here triggers prefect.logging.configuration.setup_logging() + # to be executed, this function wipes out the previous logging configuration and + # starts from a clean slate. After this has been imported once we can reinject + # the infrahub logger + importlib.import_module("prefect.main") shared_processors: list[Processor] = [ structlog.contextvars.merge_contextvars, structlog.processors.StackInfoRenderer(), @@ -36,6 +42,7 @@ def configure_logging(production: bool = True, log_level: str = "INFO") -> None: structlog.stdlib.add_logger_name, structlog.stdlib.add_log_level, ] + logging.getLogger("httpx").setLevel(logging.ERROR) if production: shared_processors.append(structlog.processors.format_exc_info) @@ -60,6 +67,10 @@ def configure_logging(production: bool = True, log_level: str = "INFO") -> None: handler = logging.StreamHandler() handler.setFormatter(formatter) root_logger = logging.getLogger() + for existing_handler in root_logger.handlers: + if isinstance(existing_handler, logging.StreamHandler): + root_logger.removeHandler(existing_handler) + root_logger.addHandler(handler) root_logger.setLevel(log_level) diff --git a/backend/infrahub/menu/constants.py b/backend/infrahub/menu/constants.py index e0040eb425..499428a35b 100644 --- a/backend/infrahub/menu/constants.py +++ b/backend/infrahub/menu/constants.py @@ -7,4 +7,4 @@ class MenuSection(InfrahubStringEnum): DEFAULT_MENU = "Other" -FULL_DEFAULT_MENU = 
"Builtin:Other" +FULL_DEFAULT_MENU = "BuiltinOther" diff --git a/backend/infrahub/menu/generator.py b/backend/infrahub/menu/generator.py index 49645bba7f..49459797fc 100644 --- a/backend/infrahub/menu/generator.py +++ b/backend/infrahub/menu/generator.py @@ -3,31 +3,56 @@ from typing import TYPE_CHECKING from infrahub.core import registry -from infrahub.core.branch import Branch # noqa: TCH001 from infrahub.core.protocols import CoreMenuItem from infrahub.log import get_logger +from infrahub.permissions.constants import AssignedPermissions +from infrahub.permissions.local_backend import LocalPermissionBackend from .constants import FULL_DEFAULT_MENU from .models import MenuDict, MenuItemDict if TYPE_CHECKING: from infrahub.auth import AccountSession + from infrahub.core.branch import Branch from infrahub.database import InfrahubDatabase log = get_logger() def get_full_name(obj: CoreMenuItem) -> str: - return f"{obj.namespace.value}:{obj.name.value}" + return f"{obj.namespace.value}{obj.name.value}" -# pylint: disable=too-many-branches -async def generate_menu( +async def generate_restricted_menu( db: InfrahubDatabase, branch: Branch, menu_items: list[CoreMenuItem], account: AccountSession | None = None ) -> MenuDict: - # FIXME temp hack to avoid pylint to complain - account = account # noqa: PLW0127 + menu = await generate_menu(db=db, branch=branch, menu_items=menu_items) + permissions = AssignedPermissions(global_permissions=[], object_permissions=[]) + perm_backend = LocalPermissionBackend() + + if account: + permissions = await perm_backend.load_permissions(db=db, account_session=account, branch=branch) + + for item in menu.data.values(): + has_permission: bool | None = None + for permission in item.get_global_permissions(): + has_permission = perm_backend.resolve_global_permission( + permissions=permissions["global_permissions"], permission_to_check=permission + ) + if has_permission: + has_permission = True + elif has_permission is None: + has_permission = 
False + + if has_permission is False: + item.hidden = True + + return menu + + +# pylint: disable=too-many-branches,too-many-statements +async def generate_menu(db: InfrahubDatabase, branch: Branch, menu_items: list[CoreMenuItem]) -> MenuDict: structure = MenuDict() full_schema = registry.schema.get_full(branch=branch, duplicate=False) @@ -63,37 +88,62 @@ async def generate_menu( log.warning( "new_menu_request: unable to find the parent menu item", branch=branch.name, - menu_item=child_item.identifier, + menu_item=full_name, parent_item=parent_full_name, ) - default_menu = structure.find_item(name=FULL_DEFAULT_MENU) - if not default_menu: - raise ValueError("Unable to locate the default menu item") + items_to_add = {schema.kind: False for schema in full_schema.values() if schema.include_in_menu is True} - for schema in full_schema.values(): - if schema.include_in_menu is False: - continue - - menu_item = MenuItemDict.from_schema(model=schema) - already_in_schema = bool(structure.find_item(name=menu_item.identifier)) - if already_in_schema: - continue + nbr_remaining_items_last_round = len(items_to_add.values()) + nbr_remaining_items = len([value for value in items_to_add.values() if value is False]) + while not all(items_to_add.values()): + for item_name, already_done in items_to_add.items(): + if already_done: + continue - if schema.menu_placement: - menu_placement = structure.find_item(name=schema.menu_placement) + schema = full_schema[item_name] + menu_item = MenuItemDict.from_schema(model=schema) + already_in_schema = bool(structure.find_item(name=menu_item.identifier)) + if already_in_schema: + items_to_add[item_name] = True + continue - if menu_placement: + if not schema.menu_placement: + first_element = MenuItemDict.from_schema(model=schema) + first_element.identifier = f"{first_element.identifier}Sub" + first_element.order_weight = 1 + menu_item.children[first_element.identifier] = first_element + structure.data[menu_item.identifier] = menu_item + 
items_to_add[item_name] = True + elif menu_placement := structure.find_item(name=schema.menu_placement): menu_placement.children[menu_item.identifier] = menu_item + items_to_add[item_name] = True continue - log.warning( - "new_menu_request: unable to find the menu_placement defined in the schema", - branch=branch.name, - item=schema.kind, - menu_placement=schema.menu_placement, - ) + nbr_remaining_items = len([value for value in items_to_add.values() if value is False]) + if nbr_remaining_items_last_round == nbr_remaining_items: + break + nbr_remaining_items_last_round = nbr_remaining_items + # ---------------------------------------------------------------------------- + # Assign the remaining items for which we couldn't find the menu_placement to the default menu + # ---------------------------------------------------------------------------- + default_menu = structure.find_item(name=FULL_DEFAULT_MENU) + if not default_menu: + raise ValueError("Unable to locate the default menu item") + + for item_name, already_done in items_to_add.items(): + if already_done: + continue + schema = full_schema[item_name] + menu_item = MenuItemDict.from_schema(model=schema) + log.warning( + "new_menu_request: unable to find the menu_placement defined in the schema", + branch=branch.name, + item=schema.kind, + menu_placement=schema.menu_placement, + ) default_menu.children[menu_item.identifier] = menu_item + items_to_add[item_name] = True return structure diff --git a/backend/infrahub/menu/menu.py b/backend/infrahub/menu/menu.py index 86cf2ec8e3..d44bddb3bd 100644 --- a/backend/infrahub/menu/menu.py +++ b/backend/infrahub/menu/menu.py @@ -30,6 +30,18 @@ def _extract_node_icon(model: MainSchemaTypes) -> str: icon="mdi:cube-outline", section=MenuSection.OBJECT, order_weight=10000, + children=[ + MenuItemDefinition( + namespace="Builtin", + name="Tag", + label="Tags", + kind=InfrahubKind.TAG, + protected=True, + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.TAG)), + 
section=MenuSection.OBJECT, + order_weight=10000, + ) + ], ), MenuItemDefinition( namespace="Builtin", @@ -166,6 +178,57 @@ def _extract_node_icon(model: MainSchemaTypes) -> str: ), ], ), + MenuItemDefinition( + namespace="Builtin", + name="UnifiedStorage", + label="Unified Storage", + icon="mdi:nas", + protected=True, + section=MenuSection.INTERNAL, + order_weight=2500, + children=[ + MenuItemDefinition( + namespace="Builtin", + name="Schema", + label="Schema", + path="/schema", + icon="mdi:file-code", + protected=True, + section=MenuSection.INTERNAL, + order_weight=1000, + ), + MenuItemDefinition( + namespace="Builtin", + name="Git Repository", + label="Repository", + kind=InfrahubKind.GENERICREPOSITORY, + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.GENERICREPOSITORY)), + protected=True, + section=MenuSection.INTERNAL, + order_weight=2000, + ), + MenuItemDefinition( + namespace="Builtin", + name="Credentials", + label="Credentials", + kind=InfrahubKind.CREDENTIAL, + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.CREDENTIAL)), + protected=True, + section=MenuSection.INTERNAL, + order_weight=2000, + ), + MenuItemDefinition( + namespace="Builtin", + name="GraphqlQuery", + label="GraphQL Query", + kind=InfrahubKind.GRAPHQLQUERY, + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.GRAPHQLQUERY)), + protected=True, + section=MenuSection.INTERNAL, + order_weight=3000, + ), + ], + ), MenuItemDefinition( namespace="Builtin", name="Deployment", @@ -173,7 +236,7 @@ def _extract_node_icon(model: MainSchemaTypes) -> str: icon="mdi:rocket-launch", protected=True, section=MenuSection.INTERNAL, - order_weight=2500, + order_weight=3000, children=[ MenuItemDefinition( namespace="Builtin", @@ -249,42 +312,43 @@ def _extract_node_icon(model: MainSchemaTypes) -> str: ), MenuItemDefinition( namespace="Builtin", - name="UnifiedStorage", - label="Unified Storage", - icon="mdi:nas", + name="Integration", + label="Integrations", + icon="mdi:connection", 
protected=True, section=MenuSection.INTERNAL, - order_weight=3000, + order_weight=3500, children=[ MenuItemDefinition( namespace="Builtin", - name="Schema", - label="Schema", - path="/schema", - icon="mdi:file-code", - protected=True, - section=MenuSection.INTERNAL, - order_weight=1000, - ), - MenuItemDefinition( - namespace="Builtin", - name="Git Repository", - label="Repository", - kind=InfrahubKind.GENERICREPOSITORY, - icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.GENERICREPOSITORY)), - protected=True, - section=MenuSection.INTERNAL, - order_weight=2000, - ), - MenuItemDefinition( - namespace="Builtin", - name="GraphqlQuery", - label="GraphQL Query", - kind=InfrahubKind.GRAPHQLQUERY, - icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.GRAPHQLQUERY)), + name="Webhooks", + label="Webhooks", + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.CUSTOMWEBHOOK)), protected=True, section=MenuSection.INTERNAL, order_weight=3000, + children=[ + MenuItemDefinition( + namespace="Builtin", + name="WebhookStandard", + label="Webhook", + kind=InfrahubKind.STANDARDWEBHOOK, + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.STANDARDWEBHOOK)), + protected=True, + section=MenuSection.INTERNAL, + order_weight=1000, + ), + MenuItemDefinition( + namespace="Builtin", + name="WebhookCustom", + label="Custom Webhook", + kind=InfrahubKind.CUSTOMWEBHOOK, + icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.CUSTOMWEBHOOK)), + protected=True, + section=MenuSection.INTERNAL, + order_weight=2000, + ), + ], ), ], ), @@ -296,27 +360,18 @@ def _extract_node_icon(model: MainSchemaTypes) -> str: protected=True, section=MenuSection.INTERNAL, order_weight=10000, + permissions=["global:super_admin:allow_all"], children=[ MenuItemDefinition( namespace="Builtin", name="RoleManagement", - label="Role Management", + label="Users & Permissions", path="/role-management", icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.BASEPERMISSION)), protected=True, 
section=MenuSection.INTERNAL, order_weight=1000, ), - MenuItemDefinition( - namespace="Builtin", - name="Credentials", - label="Credentials", - kind=InfrahubKind.CREDENTIAL, - icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.CREDENTIAL)), - protected=True, - section=MenuSection.INTERNAL, - order_weight=2000, - ), MenuItemDefinition( namespace="Builtin", name="Menu", @@ -327,37 +382,6 @@ def _extract_node_icon(model: MainSchemaTypes) -> str: section=MenuSection.INTERNAL, order_weight=2500, ), - MenuItemDefinition( - namespace="Builtin", - name="Webhooks", - label="Webhooks", - icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.CUSTOMWEBHOOK)), - protected=True, - section=MenuSection.INTERNAL, - order_weight=3000, - children=[ - MenuItemDefinition( - namespace="Builtin", - name="WebhookStandard", - label="Webhook", - kind=InfrahubKind.STANDARDWEBHOOK, - icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.STANDARDWEBHOOK)), - protected=True, - section=MenuSection.INTERNAL, - order_weight=1000, - ), - MenuItemDefinition( - namespace="Builtin", - name="WebhookCustom", - label="Custom Webhook", - kind=InfrahubKind.CUSTOMWEBHOOK, - icon=_extract_node_icon(infrahub_schema.get(InfrahubKind.CUSTOMWEBHOOK)), - protected=True, - section=MenuSection.INTERNAL, - order_weight=2000, - ), - ], - ), ], ), ] diff --git a/backend/infrahub/menu/models.py b/backend/infrahub/menu/models.py index db0b17ce1e..ff944c04cd 100644 --- a/backend/infrahub/menu/models.py +++ b/backend/infrahub/menu/models.py @@ -6,6 +6,7 @@ from pydantic import BaseModel, Field from typing_extensions import Self +from infrahub.core.account import GlobalPermission from infrahub.core.node import Node from infrahub.core.protocols import CoreMenuItem from infrahub.core.schema import GenericSchema, MainSchemaTypes, NodeSchema, ProfileSchema @@ -23,11 +24,11 @@ def get_full_name(obj: CoreMenuItem | NodeSchema | GenericSchema | ProfileSchema def _get_full_name_node(obj: CoreMenuItem) -> str: - return 
f"{obj.namespace.value}:{obj.name.value}" + return f"{obj.namespace.value}{obj.name.value}" def _get_full_name_schema(node: MainSchemaTypes) -> str: - return f"{node.namespace}:{node.name}" + return f"{node.namespace}{node.name}" @dataclass @@ -54,7 +55,9 @@ def to_rest(self) -> Menu: data: dict[str, list[MenuItemList]] = {} for section in [MenuSection.INTERNAL, MenuSection.OBJECT]: - item_per_section = [value.to_list() for key, value in self.data.items() if value.section == section] + item_per_section = [ + value.to_list() for value in self.data.values() if value.section == section and value.hidden is False + ] data[section.value] = sorted(item_per_section, key=lambda d: d.order_weight) return Menu(sections=data) @@ -78,6 +81,7 @@ class MenuItem(BaseModel): kind: str = Field(default="", description="Kind of the model associated with this menuitem if applicable") order_weight: int = 5000 section: MenuSection = MenuSection.OBJECT + permissions: list[str] = Field(default_factory=list) @classmethod def from_node(cls, obj: CoreMenuItem) -> Self: @@ -89,6 +93,7 @@ def from_node(cls, obj: CoreMenuItem) -> Self: path=obj.path.value or "", kind=obj.kind.value or "", section=obj.section.value, + permissions=obj.required_permissions.value or [], ) @classmethod @@ -103,14 +108,23 @@ def from_schema(cls, model: NodeSchema | GenericSchema | ProfileSchema) -> Self: class MenuItemDict(MenuItem): + hidden: bool = False children: dict[str, MenuItemDict] = Field(default_factory=dict, description="Child objects") def to_list(self) -> MenuItemList: data = self.model_dump(exclude={"children"}) - unsorted_children = [child.to_list() for child in self.children.values()] + unsorted_children = [child.to_list() for child in self.children.values() if child.hidden is False] data["children"] = sorted(unsorted_children, key=lambda d: d.order_weight) return MenuItemList(**data) + def get_global_permissions(self) -> list[GlobalPermission]: + permissions: list[GlobalPermission] = [] + for 
permission in self.permissions: + if not permission.startswith("global"): + continue + permissions.append(GlobalPermission.from_string(input=permission)) + return permissions + class MenuItemList(MenuItem): children: list[MenuItemList] = Field(default_factory=list, description="Child objects") @@ -127,6 +141,7 @@ class MenuItemDefinition(BaseModel): kind: str = "" section: MenuSection = MenuSection.OBJECT order_weight: int = 2000 + permissions: list[str] = Field(default_factory=list) children: list[MenuItemDefinition] = Field(default_factory=list) async def to_node(self, db: InfrahubDatabase, parent: CoreMenuItem | None = None) -> CoreMenuItem: @@ -143,6 +158,7 @@ async def to_node(self, db: InfrahubDatabase, parent: CoreMenuItem | None = None section=self.section.value, order_weight=self.order_weight, parent=parent.id if parent else None, + required_permissions=self.permissions, ) return obj @@ -157,4 +173,4 @@ def get_path(self) -> str | None: @property def full_name(self) -> str: - return f"{self.namespace}:{self.name}" + return f"{self.namespace}{self.name}" diff --git a/backend/infrahub/message_bus/messages/__init__.py b/backend/infrahub/message_bus/messages/__init__.py index 03296fa91c..7c4c9159e6 100644 --- a/backend/infrahub/message_bus/messages/__init__.py +++ b/backend/infrahub/message_bus/messages/__init__.py @@ -13,7 +13,6 @@ from .event_schema_update import EventSchemaUpdate from .event_worker_newprimaryapi import EventWorkerNewPrimaryAPI from .finalize_validator_execution import FinalizeValidatorExecution -from .git_branch_create import GitBranchCreate from .git_diff_namesonly import GitDiffNamesOnly, GitDiffNamesOnlyResponse from .git_file_get import GitFileGet, GitFileGetResponse from .git_repository_add import GitRepositoryAdd @@ -31,12 +30,7 @@ from .refresh_registry_branches import RefreshRegistryBranches from .refresh_registry_rebasedbranch import RefreshRegistryRebasedBranch from .refresh_webhook_configuration import RefreshWebhookConfiguration 
-from .request_artifact_generate import RequestArtifactGenerate from .request_artifactdefinition_check import RequestArtifactDefinitionCheck -from .request_artifactdefinition_generate import RequestArtifactDefinitionGenerate -from .request_diff_refresh import RequestDiffRefresh -from .request_diff_update import RequestDiffUpdate -from .request_generator_run import RequestGeneratorRun from .request_generatordefinition_check import RequestGeneratorDefinitionCheck from .request_generatordefinition_run import RequestGeneratorDefinitionRun from .request_graphqlquerygroup_update import RequestGraphQLQueryGroupUpdate @@ -47,9 +41,6 @@ from .schema_migration_path import SchemaMigrationPath, SchemaMigrationPathResponse from .schema_validator_path import SchemaValidatorPath, SchemaValidatorPathResponse from .send_echo_request import SendEchoRequest, SendEchoRequestResponse -from .send_telemetry_push import SendTelemetryPush -from .send_webhook_event import SendWebhookEvent -from .trigger_artifact_definition_generate import TriggerArtifactDefinitionGenerate from .trigger_generatordefinition_run import TriggerGeneratorDefinitionRun from .trigger_proposed_change_cancel import TriggerProposedChangeCancel from .trigger_webhook_actions import TriggerWebhookActions @@ -68,7 +59,6 @@ "event.schema.update": EventSchemaUpdate, "event.worker.new_primary_api": EventWorkerNewPrimaryAPI, "finalize.validator.execution": FinalizeValidatorExecution, - "git.branch.create": GitBranchCreate, "git.diff.names_only": GitDiffNamesOnly, "git.file.get": GitFileGet, "git.repository.add": GitRepositoryAdd, @@ -82,12 +72,7 @@ "refresh.registry.branches": RefreshRegistryBranches, "refresh.registry.rebased_branch": RefreshRegistryRebasedBranch, "refresh.webhook.configuration": RefreshWebhookConfiguration, - "request.artifact.generate": RequestArtifactGenerate, "request.artifact_definition.check": RequestArtifactDefinitionCheck, - "request.artifact_definition.generate": RequestArtifactDefinitionGenerate, - 
"request.diff.update": RequestDiffUpdate, - "request.diff.refresh": RequestDiffRefresh, - "request.generator.run": RequestGeneratorRun, "request.generator_definition.check": RequestGeneratorDefinitionCheck, "request.generator_definition.run": RequestGeneratorDefinitionRun, "request.graphql_query_group.update": RequestGraphQLQueryGroupUpdate, @@ -102,9 +87,6 @@ "request.repository.checks": RequestRepositoryChecks, "request.repository.user_checks": RequestRepositoryUserChecks, "send.echo.request": SendEchoRequest, - "send.webhook.event": SendWebhookEvent, - "send.telemetry.push": SendTelemetryPush, - "trigger.artifact_definition.generate": TriggerArtifactDefinitionGenerate, "trigger.generator_definition.run": TriggerGeneratorDefinitionRun, "trigger.proposed_change.cancel": TriggerProposedChangeCancel, "trigger.webhook.actions": TriggerWebhookActions, diff --git a/backend/infrahub/message_bus/messages/event_branch_merge.py b/backend/infrahub/message_bus/messages/event_branch_merge.py index c438340b8c..b71d584d63 100644 --- a/backend/infrahub/message_bus/messages/event_branch_merge.py +++ b/backend/infrahub/message_bus/messages/event_branch_merge.py @@ -1,6 +1,5 @@ from pydantic import Field -from infrahub.core.ipam.model import IpamNodeDetails from infrahub.message_bus import InfrahubMessage @@ -9,4 +8,3 @@ class EventBranchMerge(InfrahubMessage): source_branch: str = Field(..., description="The source branch") target_branch: str = Field(..., description="The target branch") - ipam_node_details: list[IpamNodeDetails] = Field(default_factory=list, description="Details for changed IP nodes") diff --git a/backend/infrahub/message_bus/messages/event_branch_rebased.py b/backend/infrahub/message_bus/messages/event_branch_rebased.py index 04335ace44..604d9d9bf6 100644 --- a/backend/infrahub/message_bus/messages/event_branch_rebased.py +++ b/backend/infrahub/message_bus/messages/event_branch_rebased.py @@ -1,6 +1,5 @@ from pydantic import Field -from infrahub.core.ipam.model 
import IpamNodeDetails from infrahub.message_bus import InfrahubMessage @@ -8,4 +7,3 @@ class EventBranchRebased(InfrahubMessage): """Sent when a branch has been rebased.""" branch: str = Field(..., description="The branch that was rebased") - ipam_node_details: list[IpamNodeDetails] = Field(default_factory=list, description="Details for changed IP nodes") diff --git a/backend/infrahub/message_bus/messages/git_branch_create.py b/backend/infrahub/message_bus/messages/git_branch_create.py deleted file mode 100644 index 88119493d5..0000000000 --- a/backend/infrahub/message_bus/messages/git_branch_create.py +++ /dev/null @@ -1,12 +0,0 @@ -from pydantic import Field - -from infrahub.message_bus import InfrahubMessage - - -class GitBranchCreate(InfrahubMessage): - """Create a branch in a Git repository.""" - - branch: str = Field(..., description="Name of the branch to create") - branch_id: str = Field(..., description="The unique ID of the branch") - repository_id: str = Field(..., description="The unique ID of the Repository") - repository_name: str = Field(..., description="The name of the Repository") diff --git a/backend/infrahub/message_bus/messages/request_artifactdefinition_generate.py b/backend/infrahub/message_bus/messages/request_artifactdefinition_generate.py deleted file mode 100644 index 6658826d45..0000000000 --- a/backend/infrahub/message_bus/messages/request_artifactdefinition_generate.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import List - -from pydantic import Field - -from infrahub.message_bus import InfrahubMessage - - -class RequestArtifactDefinitionGenerate(InfrahubMessage): - """Sent to trigger the generation of artifacts for a given branch.""" - - artifact_definition: str = Field(..., description="The unique ID of the Artifact Definition") - branch: str = Field(..., description="The branch to target") - limit: List[str] = Field( - default_factory=list, - description="List of targets to limit the scope of the generation, if populated only the 
included artifacts will be regenerated", - ) diff --git a/backend/infrahub/message_bus/messages/send_telemetry_push.py b/backend/infrahub/message_bus/messages/send_telemetry_push.py deleted file mode 100644 index 0f2b148780..0000000000 --- a/backend/infrahub/message_bus/messages/send_telemetry_push.py +++ /dev/null @@ -1,5 +0,0 @@ -from infrahub.message_bus import InfrahubMessage - - -class SendTelemetryPush(InfrahubMessage): - """Push usage telemetry.""" diff --git a/backend/infrahub/message_bus/messages/trigger_artifact_definition_generate.py b/backend/infrahub/message_bus/messages/trigger_artifact_definition_generate.py deleted file mode 100644 index 14f4790840..0000000000 --- a/backend/infrahub/message_bus/messages/trigger_artifact_definition_generate.py +++ /dev/null @@ -1,9 +0,0 @@ -from pydantic import Field - -from infrahub.message_bus import InfrahubMessage - - -class TriggerArtifactDefinitionGenerate(InfrahubMessage): - """Sent after a branch has been merged to start the regeneration of artifacts""" - - branch: str = Field(..., description="The impacted branch") diff --git a/backend/infrahub/message_bus/operations/__init__.py b/backend/infrahub/message_bus/operations/__init__.py index 37efc0f9f8..28fefe17e9 100644 --- a/backend/infrahub/message_bus/operations/__init__.py +++ b/backend/infrahub/message_bus/operations/__init__.py @@ -33,7 +33,6 @@ "event.schema.update": event.schema.update, "event.worker.new_primary_api": event.worker.new_primary_api, "finalize.validator.execution": finalize.validator.execution, - "git.branch.create": git.branch.create, "git.diff.names_only": git.diff.names_only, "git.file.get": git.file.get, "git.repository.add": git.repository.add, @@ -45,15 +44,10 @@ "refresh.registry.branches": refresh.registry.branches, "refresh.registry.rebased_branch": refresh.registry.rebased_branch, "refresh.webhook.configuration": refresh.webhook.configuration, - "request.diff.refresh": requests.diff.refresh, - "request.diff.update": 
requests.diff.update, - "request.generator.run": requests.generator.run, "request.generator_definition.check": requests.generator_definition.check, "request.generator_definition.run": requests.generator_definition.run, "request.graphql_query_group.update": requests.graphql_query_group.update, - "request.artifact.generate": requests.artifact.generate, "request.artifact_definition.check": requests.artifact_definition.check, - "request.artifact_definition.generate": requests.artifact_definition.generate, "request.proposed_change.cancel": requests.proposed_change.cancel, "request.proposed_change.data_integrity": requests.proposed_change.data_integrity, "request.proposed_change.pipeline": requests.proposed_change.pipeline, @@ -65,11 +59,8 @@ "request.repository.checks": requests.repository.checks, "request.repository.user_checks": requests.repository.user_checks, "send.echo.request": send.echo.request, - "send.webhook.event": send.webhook.event, - "send.telemetry.push": send.telemetry.push, "schema.migration.path": schema.migration.path, "schema.validator.path": schema.validator.path, - "trigger.artifact_definition.generate": trigger.artifact_definition.generate, "trigger.generator_definition.run": trigger.generator_definition.run, "trigger.proposed_change.cancel": trigger.proposed_change.cancel, "trigger.webhook.actions": trigger.webhook.actions, diff --git a/backend/infrahub/message_bus/operations/event/branch.py b/backend/infrahub/message_bus/operations/event/branch.py index 3cb972d0e8..976c9d70b0 100644 --- a/backend/infrahub/message_bus/operations/event/branch.py +++ b/backend/infrahub/message_bus/operations/event/branch.py @@ -4,12 +4,18 @@ from infrahub.core import registry from infrahub.core.diff.model.path import BranchTrackingId +from infrahub.core.diff.models import RequestDiffRefresh, RequestDiffUpdate from infrahub.core.diff.repository.repository import DiffRepository from infrahub.dependencies.registry import get_component_registry from infrahub.log import 
get_logger from infrahub.message_bus import InfrahubMessage, messages from infrahub.services import InfrahubServices -from infrahub.workflows.catalogue import GIT_REPOSITORIES_CREATE_BRANCH, IPAM_RECONCILIATION +from infrahub.workflows.catalogue import ( + GIT_REPOSITORIES_CREATE_BRANCH, + REQUEST_DIFF_REFRESH, + REQUEST_DIFF_UPDATE, + TRIGGER_ARTIFACT_DEFINITION_GENERATE, +) log = get_logger() @@ -50,7 +56,6 @@ async def merge(message: messages.EventBranchMerge, service: InfrahubServices) - events: List[InfrahubMessage] = [ messages.RefreshRegistryBranches(), - messages.TriggerArtifactDefinitionGenerate(branch=message.target_branch), messages.TriggerGeneratorDefinitionRun(branch=message.target_branch), ] component_registry = get_component_registry() @@ -60,8 +65,8 @@ async def merge(message: messages.EventBranchMerge, service: InfrahubServices) - branch_diff_roots = await diff_repository.get_empty_roots(base_branch_names=[message.target_branch]) await service.workflow.submit_workflow( - workflow=IPAM_RECONCILIATION, - parameters={"branch": message.target_branch, "ipam_node_details": message.ipam_node_details}, + workflow=TRIGGER_ARTIFACT_DEFINITION_GENERATE, + parameters={"branch": message.target_branch}, ) for diff_root in branch_diff_roots: @@ -70,7 +75,10 @@ async def merge(message: messages.EventBranchMerge, service: InfrahubServices) - and diff_root.tracking_id and isinstance(diff_root.tracking_id, BranchTrackingId) ): - events.append(messages.RequestDiffUpdate(branch_name=diff_root.diff_branch_name)) + request_diff_update_model = RequestDiffUpdate(branch_name=diff_root.diff_branch_name) + await service.workflow.submit_workflow( + workflow=REQUEST_DIFF_UPDATE, parameters={"model": request_diff_update_model} + ) for event in events: event.assign_meta(parent=message) @@ -84,11 +92,6 @@ async def rebased(message: messages.EventBranchRebased, service: InfrahubService events: List[InfrahubMessage] = [ messages.RefreshRegistryRebasedBranch(branch=message.branch), ] 
- if message.ipam_node_details: - await service.workflow.submit_workflow( - workflow=IPAM_RECONCILIATION, - parameters={"branch": message.branch, "ipam_node_details": message.ipam_node_details}, - ) # for every diff that touches the rebased branch, recalculate it component_registry = get_component_registry() @@ -98,7 +101,12 @@ async def rebased(message: messages.EventBranchRebased, service: InfrahubService for diff_root in diff_roots_to_refresh: if diff_root.base_branch_name != diff_root.diff_branch_name: - events.append(messages.RequestDiffRefresh(branch_name=diff_root.diff_branch_name, diff_id=diff_root.uuid)) + request_diff_refresh_model = RequestDiffRefresh( + branch_name=diff_root.diff_branch_name, diff_id=diff_root.uuid + ) + await service.workflow.submit_workflow( + workflow=REQUEST_DIFF_REFRESH, parameters={"model": request_diff_refresh_model} + ) for event in events: event.assign_meta(parent=message) diff --git a/backend/infrahub/message_bus/operations/git/__init__.py b/backend/infrahub/message_bus/operations/git/__init__.py index 776717bdcc..4eb4925baa 100644 --- a/backend/infrahub/message_bus/operations/git/__init__.py +++ b/backend/infrahub/message_bus/operations/git/__init__.py @@ -1,3 +1,3 @@ -from . import branch, diff, file, repository +from . 
import diff, file, repository -__all__ = ["branch", "diff", "file", "repository"] +__all__ = ["diff", "file", "repository"] diff --git a/backend/infrahub/message_bus/operations/git/branch.py b/backend/infrahub/message_bus/operations/git/branch.py deleted file mode 100644 index 931241096d..0000000000 --- a/backend/infrahub/message_bus/operations/git/branch.py +++ /dev/null @@ -1,17 +0,0 @@ -from prefect import flow - -from infrahub import lock -from infrahub.git.repository import InfrahubRepository -from infrahub.log import get_logger -from infrahub.message_bus import messages -from infrahub.services import InfrahubServices - -log = get_logger() - - -@flow(name="git-repository-branch-create") -async def create(message: messages.GitBranchCreate, service: InfrahubServices) -> None: - log.info("creating branch in repository", branch=message.branch, repository=message.repository_name) - repo = await InfrahubRepository.init(id=message.repository_id, name=message.repository_name, client=service.client) - async with lock.registry.get(name=message.repository_name, namespace="repository"): - await repo.create_branch_in_git(branch_name=message.branch, branch_id=message.branch_id) diff --git a/backend/infrahub/message_bus/operations/requests/__init__.py b/backend/infrahub/message_bus/operations/requests/__init__.py index ffc574a58c..52eb1ebe8b 100644 --- a/backend/infrahub/message_bus/operations/requests/__init__.py +++ b/backend/infrahub/message_bus/operations/requests/__init__.py @@ -1,8 +1,5 @@ from . 
import ( - artifact, artifact_definition, - diff, - generator, generator_definition, graphql_query_group, proposed_change, @@ -10,10 +7,7 @@ ) __all__ = [ - "artifact", "artifact_definition", - "diff", - "generator", "generator_definition", "graphql_query_group", "proposed_change", diff --git a/backend/infrahub/message_bus/operations/requests/artifact.py b/backend/infrahub/message_bus/operations/requests/artifact.py deleted file mode 100644 index eb04019d0b..0000000000 --- a/backend/infrahub/message_bus/operations/requests/artifact.py +++ /dev/null @@ -1,38 +0,0 @@ -from prefect import flow - -from infrahub.git.repository import get_initialized_repo -from infrahub.log import get_logger -from infrahub.message_bus import messages -from infrahub.services import InfrahubServices -from infrahub.tasks.artifact import define_artifact - -log = get_logger() - - -@flow(name="artifact-generate") -async def generate(message: messages.RequestArtifactGenerate, service: InfrahubServices) -> None: - log.debug("Generating artifact", message=message) - - repo = await get_initialized_repo( - repository_id=message.repository_id, - name=message.repository_name, - service=service, - repository_kind=message.repository_kind, - ) - - artifact = await define_artifact(message=message, service=service) - - try: - result = await repo.render_artifact(artifact=artifact, message=message) - log.debug( - "Generated artifact", - name=message.artifact_name, - changed=result.changed, - checksum=result.checksum, - artifact_id=result.artifact_id, - storage_id=result.storage_id, - ) - except Exception as exc: # pylint: disable=broad-except - log.exception("Failed to generate artifact", error=exc) - artifact.status.value = "Error" - await artifact.save() diff --git a/backend/infrahub/message_bus/operations/requests/artifact_definition.py b/backend/infrahub/message_bus/operations/requests/artifact_definition.py index 416759b8ce..a0fad05a8d 100644 --- 
a/backend/infrahub/message_bus/operations/requests/artifact_definition.py +++ b/backend/infrahub/message_bus/operations/requests/artifact_definition.py @@ -136,86 +136,6 @@ async def check(message: messages.RequestArtifactDefinitionCheck, service: Infra await service.send(message=event) -@flow(name="artifact-definition-generate") -async def generate(message: messages.RequestArtifactDefinitionGenerate, service: InfrahubServices) -> None: - log.info( - "Received request to generate artifacts for an artifact_definition", - branch=message.branch, - artifact_definition=message.artifact_definition, - limit=message.limit, - ) - artifact_definition = await service.client.get( - kind=InfrahubKind.ARTIFACTDEFINITION, id=message.artifact_definition, branch=message.branch - ) - - await artifact_definition.targets.fetch() - group = artifact_definition.targets.peer - await group.members.fetch() - - existing_artifacts = await service.client.filters( - kind=InfrahubKind.ARTIFACT, - definition__ids=[message.artifact_definition], - include=["object"], - branch=message.branch, - ) - artifacts_by_member = {} - for artifact in existing_artifacts: - artifacts_by_member[artifact.object.peer.id] = artifact.id - - await artifact_definition.transformation.fetch() - transformation_repository = artifact_definition.transformation.peer.repository - - await transformation_repository.fetch() - - transform = artifact_definition.transformation.peer - await transform.query.fetch() - query = transform.query.peer - repository = transformation_repository.peer - branch = await service.client.branch.get(branch_name=message.branch) - if branch.sync_with_git: - repository = await service.client.get( - kind=InfrahubKind.GENERICREPOSITORY, id=repository.id, branch=message.branch, fragment=True - ) - transform_location = "" - - if transform.typename == InfrahubKind.TRANSFORMJINJA2: - transform_location = transform.template_path.value - elif transform.typename == InfrahubKind.TRANSFORMPYTHON: - 
transform_location = f"{transform.file_path.value}::{transform.class_name.value}" - - events = [] - for relationship in group.members.peers: - member = relationship.peer - artifact_id = artifacts_by_member.get(member.id) - if message.limit and artifact_id not in message.limit: - continue - - events.append( - messages.RequestArtifactGenerate( - artifact_name=artifact_definition.name.value, - artifact_id=artifact_id, - artifact_definition=message.artifact_definition, - commit=repository.commit.value, - content_type=artifact_definition.content_type.value, - transform_type=transform.typename, - transform_location=transform_location, - repository_id=repository.id, - repository_name=repository.name.value, - repository_kind=repository.get_kind(), - branch_name=message.branch, - query=query.name.value, - variables=member.extract(params=artifact_definition.parameters.value), - target_id=member.id, - target_name=member.display_label, - timeout=transform.timeout.value, - ) - ) - - for event in events: - event.assign_meta(parent=message) - await service.send(message=event) - - def _render_artifact(artifact_id: Optional[str], managed_branch: bool, impacted_artifacts: list[str]) -> bool: """Returns a boolean to indicate if an artifact should be generated or not. 
Will return true if: diff --git a/backend/infrahub/message_bus/operations/requests/generator_definition.py b/backend/infrahub/message_bus/operations/requests/generator_definition.py index 613897030d..28acfcd27b 100644 --- a/backend/infrahub/message_bus/operations/requests/generator_definition.py +++ b/backend/infrahub/message_bus/operations/requests/generator_definition.py @@ -5,9 +5,11 @@ from infrahub.core.constants import InfrahubKind, ValidatorConclusion, ValidatorState from infrahub.core.timestamp import Timestamp +from infrahub.generators.models import RequestGeneratorRun from infrahub.message_bus import InfrahubMessage, Meta, messages from infrahub.message_bus.types import KVTTL from infrahub.services import InfrahubServices +from infrahub.workflows.catalogue import REQUEST_GENERATOR_RUN @flow(name="generator-definition-check") @@ -140,7 +142,6 @@ async def run(message: messages.RequestGeneratorDefinitionRun, service: Infrahub branch=message.branch, generator_definition=message.generator_definition.definition_id, ) - events: list[InfrahubMessage] = [] group = await service.client.get( kind=InfrahubKind.GENERICGROUP, @@ -168,30 +169,25 @@ async def run(message: messages.RequestGeneratorDefinitionRun, service: Infrahub for relationship in group.members.peers: member = relationship.peer generator_instance = instance_by_member.get(member.id) - events.append( - messages.RequestGeneratorRun( - generator_definition=message.generator_definition, - commit=repository.commit.value, - generator_instance=generator_instance, - repository_id=repository.id, - repository_name=repository.name.value, - repository_kind=repository.typename, - branch_name=message.branch, - query=message.generator_definition.query_name, - variables=member.extract(params=message.generator_definition.parameters), - target_id=member.id, - target_name=member.display_label, - ) + model = RequestGeneratorRun( + generator_definition=message.generator_definition, + commit=repository.commit.value, + 
generator_instance=generator_instance, + repository_id=repository.id, + repository_name=repository.name.value, + repository_kind=repository.typename, + branch_name=message.branch, + query=message.generator_definition.query_name, + variables=member.extract(params=message.generator_definition.parameters), + target_id=member.id, + target_name=member.display_label, ) + await service.workflow.submit_workflow(workflow=REQUEST_GENERATOR_RUN, parameters={"model": model}) await task_report.info( event=f"Generator triggered for {len(group.members.peers)} members in {group.name.value}." ) - for event in events: - event.assign_meta(parent=message) - await service.send(message=event) - def _run_generator(instance_id: Optional[str], managed_branch: bool, impacted_instances: list[str]) -> bool: """Returns a boolean to indicate if a generator instance needs to be executed diff --git a/backend/infrahub/message_bus/operations/requests/proposed_change.py b/backend/infrahub/message_bus/operations/requests/proposed_change.py index 122f8e703d..174fa26a3f 100644 --- a/backend/infrahub/message_bus/operations/requests/proposed_change.py +++ b/backend/infrahub/message_bus/operations/requests/proposed_change.py @@ -15,7 +15,8 @@ from infrahub import config, lock from infrahub.core.constants import CheckType, InfrahubKind, ProposedChangeState, RepositoryInternalStatus from infrahub.core.diff.coordinator import DiffCoordinator -from infrahub.core.diff.model.diff import SchemaConflict +from infrahub.core.diff.model.diff import DiffElementType, SchemaConflict +from infrahub.core.diff.model.path import NodeDiffFieldSummary from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder from infrahub.core.registry import registry from infrahub.core.validators.checker import schema_validators_checker @@ -894,5 +895,22 @@ async def _populate_subscribers(branch_diff: ProposedChangeBranchDiff, service: async def _get_proposed_change_schema_integrity_constraints( 
message: messages.RequestProposedChangeSchemaIntegrity, schema: SchemaBranch ) -> list[SchemaUpdateConstraintInfo]: + node_diff_field_summary_map: dict[str, NodeDiffFieldSummary] = {} + for node_diff in message.branch_diff.diff_summary: + node_kind = node_diff["kind"] + if node_kind not in node_diff_field_summary_map: + node_diff_field_summary_map[node_kind] = NodeDiffFieldSummary(kind=node_kind) + field_summary = node_diff_field_summary_map[node_kind] + for element in node_diff["elements"]: + element_name = element["name"] + element_type = element["element_type"] + if element_type.lower() in ( + DiffElementType.RELATIONSHIP_MANY.value.lower(), + DiffElementType.RELATIONSHIP_ONE.value.lower(), + ): + field_summary.relationship_names.add(element_name) + elif element_type.lower() in (DiffElementType.ATTRIBUTE.value.lower(),): + field_summary.attribute_names.add(element_name) + determiner = ConstraintValidatorDeterminer(schema_branch=schema) - return await determiner.get_constraints(node_diffs=message.branch_diff.diff_summary) + return await determiner.get_constraints(node_diffs=list(node_diff_field_summary_map.values())) diff --git a/backend/infrahub/message_bus/operations/send/__init__.py b/backend/infrahub/message_bus/operations/send/__init__.py index fb6b746f43..93293e24e3 100644 --- a/backend/infrahub/message_bus/operations/send/__init__.py +++ b/backend/infrahub/message_bus/operations/send/__init__.py @@ -1,3 +1,3 @@ -from . import echo, telemetry, webhook +from . 
import echo -__all__ = ["echo", "telemetry", "webhook"] +__all__ = ["echo"] diff --git a/backend/infrahub/message_bus/operations/send/webhook.py b/backend/infrahub/message_bus/operations/send/webhook.py deleted file mode 100644 index 4110d78fac..0000000000 --- a/backend/infrahub/message_bus/operations/send/webhook.py +++ /dev/null @@ -1,68 +0,0 @@ -from typing import Any - -import ujson -from prefect import flow -from prefect.logging import get_run_logger - -from infrahub.exceptions import NodeNotFoundError -from infrahub.message_bus import messages -from infrahub.message_bus.messages.send_webhook_event import SendWebhookData -from infrahub.services import InfrahubServices, services -from infrahub.webhook import CustomWebhook, StandardWebhook, TransformWebhook, Webhook - - -@flow(name="event-send-webhook") -async def event(message: messages.SendWebhookEvent, service: InfrahubServices) -> None: - async with service.task_report( - related_node=message.webhook_id, - title="Webhook", - ) as task_report: - webhook_definition = await service.cache.get(key=f"webhook:active:{message.webhook_id}") - if not webhook_definition: - service.log.warning("Webhook not found", webhook_id=message.webhook_id) - raise NodeNotFoundError( - node_type="Webhook", identifier=message.webhook_id, message="The requested Webhook was not found" - ) - - webhook_data = ujson.loads(webhook_definition) - payload: dict[str, Any] = {"event_type": message.event_type, "data": message.event_data, "service": service} - webhook_map: dict[str, type[Webhook]] = { - "standard": StandardWebhook, - "custom": CustomWebhook, - "transform": TransformWebhook, - } - webhook_class = webhook_map[webhook_data["webhook_type"]] - payload.update(webhook_data["webhook_configuration"]) - webhook = webhook_class(**payload) - await webhook.send() - await task_report.finalise( - title=webhook.webhook_type, - logs={"message": "Successfully sent webhook", "severity": "INFO"}, - ) - - -@flow -async def send_webhook(message: 
SendWebhookData) -> None: - service = services.service - log = get_run_logger() - - webhook_definition = await service.cache.get(key=f"webhook:active:{message.webhook_id}") - if not webhook_definition: - log.warning("Webhook not found") - raise NodeNotFoundError( - node_type="Webhook", identifier=message.webhook_id, message="The requested Webhook was not found" - ) - - webhook_data = ujson.loads(webhook_definition) - payload: dict[str, Any] = {"event_type": message.event_type, "data": message.event_data, "service": service} - webhook_map: dict[str, type[Webhook]] = { - "standard": StandardWebhook, - "custom": CustomWebhook, - "transform": TransformWebhook, - } - webhook_class = webhook_map[webhook_data["webhook_type"]] - payload.update(webhook_data["webhook_configuration"]) - webhook = webhook_class(**payload) - await webhook.send() - - log.info("Successfully sent webhook") diff --git a/backend/infrahub/message_bus/operations/trigger/__init__.py b/backend/infrahub/message_bus/operations/trigger/__init__.py index e403b1b641..f9c9de4ead 100644 --- a/backend/infrahub/message_bus/operations/trigger/__init__.py +++ b/backend/infrahub/message_bus/operations/trigger/__init__.py @@ -1,3 +1,3 @@ -from . import artifact_definition, generator_definition, proposed_change, webhook +from . 
import generator_definition, proposed_change, webhook -__all__ = ["artifact_definition", "generator_definition", "proposed_change", "webhook"] +__all__ = ["generator_definition", "proposed_change", "webhook"] diff --git a/backend/infrahub/message_bus/operations/trigger/artifact_definition.py b/backend/infrahub/message_bus/operations/trigger/artifact_definition.py deleted file mode 100644 index 1b614f287c..0000000000 --- a/backend/infrahub/message_bus/operations/trigger/artifact_definition.py +++ /dev/null @@ -1,23 +0,0 @@ -from prefect import flow - -from infrahub.core.constants import InfrahubKind -from infrahub.log import get_logger -from infrahub.message_bus import messages -from infrahub.services import InfrahubServices - -log = get_logger() - - -@flow(name="artifact-definition-generate") -async def generate(message: messages.TriggerArtifactDefinitionGenerate, service: InfrahubServices) -> None: - artifact_definitions = await service.client.all( - kind=InfrahubKind.ARTIFACTDEFINITION, branch=message.branch, include=["id"] - ) - - events = [ - messages.RequestArtifactDefinitionGenerate(branch=message.branch, artifact_definition=artifact_definition.id) - for artifact_definition in artifact_definitions - ] - for event in events: - event.assign_meta(parent=message) - await service.send(message=event) diff --git a/backend/infrahub/message_bus/operations/trigger/webhook.py b/backend/infrahub/message_bus/operations/trigger/webhook.py index 200459a8c0..866cb053c3 100644 --- a/backend/infrahub/message_bus/operations/trigger/webhook.py +++ b/backend/infrahub/message_bus/operations/trigger/webhook.py @@ -3,7 +3,9 @@ from prefect import flow from infrahub.message_bus import InfrahubMessage, messages +from infrahub.send.models import SendWebhookData from infrahub.services import InfrahubServices +from infrahub.workflows.catalogue import WEBHOOK_SEND @flow(name="webhook-trigger-actions") @@ -12,11 +14,9 @@ async def actions(message: messages.TriggerWebhookActions, service: 
InfrahubServ events: List[InfrahubMessage] = [] for webhook in webhooks: webhook_id = webhook.split(":")[-1] - events.append( - messages.SendWebhookEvent( - webhook_id=webhook_id, event_type=message.event_type, event_data=message.event_data - ) - ) + model = SendWebhookData(webhook_id=webhook_id, event_type=message.event_type, event_data=message.event_data) + await service.workflow.submit_workflow(workflow=WEBHOOK_SEND, parameters={"model": model}) + for event in events: event.assign_meta(parent=message) await service.send(message=event) diff --git a/backend/infrahub/permissions/backend.py b/backend/infrahub/permissions/backend.py index db4d7a57fc..cdde5796ff 100644 --- a/backend/infrahub/permissions/backend.py +++ b/backend/infrahub/permissions/backend.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: + from infrahub.auth import AccountSession from infrahub.core.account import GlobalPermission, ObjectPermission from infrahub.core.branch import Branch from infrahub.database import InfrahubDatabase @@ -12,7 +13,9 @@ class PermissionBackend(ABC): @abstractmethod - async def load_permissions(self, db: InfrahubDatabase, account_id: str, branch: Branch) -> AssignedPermissions: ... + async def load_permissions( + self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch + ) -> AssignedPermissions: ... @abstractmethod def report_object_permission( @@ -21,5 +24,9 @@ def report_object_permission( @abstractmethod async def has_permission( - self, db: InfrahubDatabase, account_id: str, permission: GlobalPermission | ObjectPermission, branch: Branch + self, + db: InfrahubDatabase, + account_session: AccountSession, + permission: GlobalPermission | ObjectPermission, + branch: Branch, ) -> bool: ... 
diff --git a/backend/infrahub/permissions/constants.py b/backend/infrahub/permissions/constants.py index 4de88e3464..c4cb88477a 100644 --- a/backend/infrahub/permissions/constants.py +++ b/backend/infrahub/permissions/constants.py @@ -1,6 +1,6 @@ from __future__ import annotations -from enum import IntFlag +from enum import IntFlag, StrEnum, auto from typing import TYPE_CHECKING, TypedDict if TYPE_CHECKING: @@ -17,3 +17,12 @@ class PermissionDecisionFlag(IntFlag): ALLOW_DEFAULT = 2 ALLOW_OTHER = 4 ALLOW_ALL = ALLOW_DEFAULT | ALLOW_OTHER + + +class BranchRelativePermissionDecision(StrEnum): + """This enum is only used to communicate a permission decision relative to a branch.""" + + DENY = auto() + ALLOW = auto() + ALLOW_DEFAULT = auto() + ALLOW_OTHER = auto() diff --git a/backend/infrahub/permissions/local_backend.py b/backend/infrahub/permissions/local_backend.py index 964ad090f0..19b50a5dc3 100644 --- a/backend/infrahub/permissions/local_backend.py +++ b/backend/infrahub/permissions/local_backend.py @@ -2,13 +2,17 @@ from typing import TYPE_CHECKING -from infrahub.core.account import GlobalPermission, ObjectPermission, fetch_permissions +from infrahub import config +from infrahub.core.account import GlobalPermission, ObjectPermission, fetch_permissions, fetch_role_permissions from infrahub.core.constants import GlobalPermissions, PermissionDecision +from infrahub.core.manager import NodeManager +from infrahub.core.protocols import CoreAccountRole from infrahub.permissions.constants import PermissionDecisionFlag from .backend import PermissionBackend if TYPE_CHECKING: + from infrahub.auth import AccountSession from infrahub.core.branch import Branch from infrahub.database import InfrahubDatabase from infrahub.permissions.constants import AssignedPermissions @@ -49,7 +53,7 @@ def report_object_permission( if specificity > highest_specificity: combined_decision = permission_decision highest_specificity = specificity - elif specificity == highest_specificity: + elif 
specificity == highest_specificity and permission_decision != PermissionDecisionFlag.DENY: combined_decision |= permission_decision return combined_decision @@ -82,17 +86,36 @@ def resolve_global_permission( return grant_permission - async def load_permissions(self, db: InfrahubDatabase, account_id: str, branch: Branch) -> AssignedPermissions: - return await fetch_permissions(db=db, account_id=account_id, branch=branch) + async def load_permissions( + self, db: InfrahubDatabase, account_session: AccountSession, branch: Branch + ) -> AssignedPermissions: + if not account_session.authenticated: + anonymous_permissions: AssignedPermissions = {"global_permissions": [], "object_permissions": []} + if not config.SETTINGS.main.allow_anonymous_access: + return anonymous_permissions + + role = await NodeManager.get_one_by_hfid( + db=db, kind=CoreAccountRole, hfid=[config.SETTINGS.main.anonymous_access_role] + ) + if role: + anonymous_permissions = await fetch_role_permissions(db=db, role_id=role.id, branch=branch) + + return anonymous_permissions + + return await fetch_permissions(db=db, account_id=account_session.account_id, branch=branch) async def has_permission( - self, db: InfrahubDatabase, account_id: str, permission: GlobalPermission | ObjectPermission, branch: Branch + self, + db: InfrahubDatabase, + account_session: AccountSession, + permission: GlobalPermission | ObjectPermission, + branch: Branch, ) -> bool: - granted_permissions = await self.load_permissions(db=db, account_id=account_id, branch=branch) + granted_permissions = await self.load_permissions(db=db, account_session=account_session, branch=branch) is_super_admin = self.resolve_global_permission( permissions=granted_permissions["global_permissions"], permission_to_check=GlobalPermission( - id="", name="", action=GlobalPermissions.SUPER_ADMIN, decision=PermissionDecision.ALLOW_ALL + action=GlobalPermissions.SUPER_ADMIN, decision=PermissionDecision.ALLOW_ALL ), ) diff --git 
a/backend/infrahub/permissions/report.py b/backend/infrahub/permissions/report.py index 052410554e..2790653a49 100644 --- a/backend/infrahub/permissions/report.py +++ b/backend/infrahub/permissions/report.py @@ -2,9 +2,10 @@ from typing import TYPE_CHECKING +from infrahub.core import registry from infrahub.core.account import GlobalPermission -from infrahub.core.constants import GlobalPermissions, PermissionDecision -from infrahub.permissions.constants import AssignedPermissions, PermissionDecisionFlag +from infrahub.core.constants import GLOBAL_BRANCH_NAME, GlobalPermissions, PermissionDecision +from infrahub.permissions.constants import AssignedPermissions, BranchRelativePermissionDecision, PermissionDecisionFlag from infrahub.permissions.local_backend import LocalPermissionBackend if TYPE_CHECKING: @@ -19,13 +20,16 @@ def get_permission_report( backend: PermissionBackend, permissions: AssignedPermissions, + branch: Branch, node: MainSchemaTypes, action: str, is_super_admin: bool = False, can_edit_default_branch: bool = False, # pylint: disable=unused-argument -) -> PermissionDecisionFlag: +) -> BranchRelativePermissionDecision: + is_default_branch = branch.name in (GLOBAL_BRANCH_NAME, registry.default_branch) + if is_super_admin: - return PermissionDecisionFlag.ALLOW_ALL + return BranchRelativePermissionDecision.ALLOW decision = backend.report_object_permission( permissions=permissions["object_permissions"], namespace=node.namespace, name=node.name, action=action @@ -35,28 +39,36 @@ def get_permission_report( # if can_edit_default_branch: # decision |= PermissionDecisionFlag.ALLOW_DEFAULT - return decision + if ( + decision == PermissionDecisionFlag.ALLOW_ALL + or (decision & PermissionDecisionFlag.ALLOW_DEFAULT and is_default_branch) + or (decision & PermissionDecisionFlag.ALLOW_OTHER and not is_default_branch) + ): + return BranchRelativePermissionDecision.ALLOW + if decision & PermissionDecisionFlag.ALLOW_DEFAULT: + return 
BranchRelativePermissionDecision.ALLOW_DEFAULT + if decision & PermissionDecisionFlag.ALLOW_OTHER: + return BranchRelativePermissionDecision.ALLOW_OTHER + + return BranchRelativePermissionDecision.DENY async def report_schema_permissions( db: InfrahubDatabase, schemas: list[MainSchemaTypes], account_session: AccountSession, branch: Branch ) -> list[KindPermissions]: perm_backend = LocalPermissionBackend() - permissions = await perm_backend.load_permissions(db=db, account_id=account_session.account_id, branch=branch) + permissions = await perm_backend.load_permissions(db=db, account_session=account_session, branch=branch) is_super_admin = perm_backend.resolve_global_permission( permissions=permissions["global_permissions"], permission_to_check=GlobalPermission( - id="", name="", action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value + action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value ), ) can_edit_default_branch = perm_backend.resolve_global_permission( permissions=permissions["global_permissions"], permission_to_check=GlobalPermission( - id="", - name="", - action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, - decision=PermissionDecision.ALLOW_ALL.value, + action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value ), ) @@ -68,6 +80,7 @@ async def report_schema_permissions( "create": get_permission_report( backend=perm_backend, permissions=permissions, + branch=branch, node=node, action="create", is_super_admin=is_super_admin, @@ -76,6 +89,7 @@ async def report_schema_permissions( "delete": get_permission_report( backend=perm_backend, permissions=permissions, + branch=branch, node=node, action="delete", is_super_admin=is_super_admin, @@ -84,6 +98,7 @@ async def report_schema_permissions( "update": get_permission_report( backend=perm_backend, permissions=permissions, + branch=branch, node=node, action="update", is_super_admin=is_super_admin, @@ -92,6 +107,7 @@ 
async def report_schema_permissions( "view": get_permission_report( backend=perm_backend, permissions=permissions, + branch=branch, node=node, action="view", is_super_admin=is_super_admin, diff --git a/backend/infrahub/permissions/types.py b/backend/infrahub/permissions/types.py index 178b3f9433..5c088874b7 100644 --- a/backend/infrahub/permissions/types.py +++ b/backend/infrahub/permissions/types.py @@ -1,30 +1,14 @@ from __future__ import annotations -from dataclasses import dataclass from typing import TYPE_CHECKING, TypedDict if TYPE_CHECKING: - from infrahub.permissions.constants import PermissionDecisionFlag + from infrahub.permissions.constants import BranchRelativePermissionDecision class KindPermissions(TypedDict): kind: str - create: PermissionDecisionFlag - delete: PermissionDecisionFlag - update: PermissionDecisionFlag - view: PermissionDecisionFlag - - -@dataclass -class GlobalPermissionToVerify: - name: str - action: str - decision: PermissionDecisionFlag - - -@dataclass -class ObjectPermissionToVerify: - namespace: str - name: str - action: str - decision: PermissionDecisionFlag + create: BranchRelativePermissionDecision + delete: BranchRelativePermissionDecision + update: BranchRelativePermissionDecision + view: BranchRelativePermissionDecision diff --git a/backend/infrahub/send/__init__.py b/backend/infrahub/send/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/infrahub/message_bus/messages/send_webhook_event.py b/backend/infrahub/send/models.py similarity index 68% rename from backend/infrahub/message_bus/messages/send_webhook_event.py rename to backend/infrahub/send/models.py index d3eda2f61d..945d6d5eed 100644 --- a/backend/infrahub/message_bus/messages/send_webhook_event.py +++ b/backend/infrahub/send/models.py @@ -1,7 +1,5 @@ from pydantic import BaseModel, Field -from infrahub.message_bus import InfrahubMessage - class SendWebhookData(BaseModel): """Sent a webhook to an external source.""" @@ -9,7 +7,3 @@ 
class SendWebhookData(BaseModel): webhook_id: str = Field(..., description="The unique ID of the webhook") event_type: str = Field(..., description="The event type") event_data: dict = Field(..., description="The data tied to the event") - - -class SendWebhookEvent(SendWebhookData, InfrahubMessage): - """Sent a webhook to an external source.""" diff --git a/backend/infrahub/send/webhook.py b/backend/infrahub/send/webhook.py new file mode 100644 index 0000000000..3c66ace76b --- /dev/null +++ b/backend/infrahub/send/webhook.py @@ -0,0 +1,37 @@ +from typing import Any + +import ujson +from prefect import flow +from prefect.logging import get_run_logger + +from infrahub.exceptions import NodeNotFoundError +from infrahub.send.models import SendWebhookData +from infrahub.services import services +from infrahub.webhook import CustomWebhook, StandardWebhook, TransformWebhook, Webhook + + +@flow(name="event-send-webhook") +async def send_webhook(model: SendWebhookData) -> None: + service = services.service + log = get_run_logger() + + webhook_definition = await service.cache.get(key=f"webhook:active:{model.webhook_id}") + if not webhook_definition: + log.warning("Webhook not found") + raise NodeNotFoundError( + node_type="Webhook", identifier=model.webhook_id, message="The requested Webhook was not found" + ) + + webhook_data = ujson.loads(webhook_definition) + payload: dict[str, Any] = {"event_type": model.event_type, "data": model.event_data, "service": service} + webhook_map: dict[str, type[Webhook]] = { + "standard": StandardWebhook, + "custom": CustomWebhook, + "transform": TransformWebhook, + } + webhook_class = webhook_map[webhook_data["webhook_type"]] + payload.update(webhook_data["webhook_configuration"]) + webhook = webhook_class(**payload) + await webhook.send() + + log.info("Successfully sent webhook") diff --git a/backend/infrahub/services/__init__.py b/backend/infrahub/services/__init__.py index 3678bc7b23..ba4adffc0f 100644 --- 
a/backend/infrahub/services/__init__.py +++ b/backend/infrahub/services/__init__.py @@ -55,6 +55,9 @@ def client(self) -> InfrahubClient: return self._client + def set_client(self, client: InfrahubClient) -> None: + self._client = client + @property def database(self) -> InfrahubDatabase: if not self._database: diff --git a/backend/infrahub/services/adapters/workflow/__init__.py b/backend/infrahub/services/adapters/workflow/__init__.py index d36a3eeeeb..4cc14e16e9 100644 --- a/backend/infrahub/services/adapters/workflow/__init__.py +++ b/backend/infrahub/services/adapters/workflow/__init__.py @@ -22,6 +22,7 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: type[Return], parameters: dict[str, Any] | None = ..., + tags: list[str] | None = ..., ) -> Return: ... @overload @@ -30,6 +31,7 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: None = ..., parameters: dict[str, Any] | None = ..., + tags: list[str] | None = ..., ) -> Any: ... async def execute_workflow( @@ -37,6 +39,7 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: type[Return] | None = None, parameters: dict[str, Any] | None = None, + tags: list[str] | None = None, ) -> Any: raise NotImplementedError() @@ -44,5 +47,6 @@ async def submit_workflow( self, workflow: WorkflowDefinition, parameters: dict[str, Any] | None = None, + tags: list[str] | None = None, ) -> WorkflowInfo: raise NotImplementedError() diff --git a/backend/infrahub/services/adapters/workflow/local.py b/backend/infrahub/services/adapters/workflow/local.py index a510924897..f3d7fc8fa0 100644 --- a/backend/infrahub/services/adapters/workflow/local.py +++ b/backend/infrahub/services/adapters/workflow/local.py @@ -12,6 +12,7 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: type[Return] | None = None, parameters: dict[str, Any] | None = None, + tags: list[str] | None = None, ) -> Any: fn = workflow.get_function() return await 
fn(**parameters or {}) @@ -20,6 +21,7 @@ async def submit_workflow( self, workflow: WorkflowDefinition, parameters: dict[str, Any] | None = None, + tags: list[str] | None = None, ) -> WorkflowInfo: await self.execute_workflow(workflow=workflow, parameters=parameters) return WorkflowInfo(id=uuid.uuid4()) diff --git a/backend/infrahub/services/adapters/workflow/worker.py b/backend/infrahub/services/adapters/workflow/worker.py index 741d4b902f..6946d9d217 100644 --- a/backend/infrahub/services/adapters/workflow/worker.py +++ b/backend/infrahub/services/adapters/workflow/worker.py @@ -30,6 +30,7 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: type[Return], parameters: dict[str, Any] | None = ..., + tags: list[str] | None = ..., ) -> Return: ... @overload @@ -38,6 +39,7 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: None = ..., parameters: dict[str, Any] | None = ..., + tags: list[str] | None = ..., ) -> Any: ... async def execute_workflow( @@ -45,8 +47,11 @@ async def execute_workflow( workflow: WorkflowDefinition, expected_return: type[Return] | None = None, parameters: dict[str, Any] | None = None, + tags: list[str] | None = None, ) -> Any: - response: FlowRun = await run_deployment(name=workflow.full_name, poll_interval=1, parameters=parameters or {}) # type: ignore[return-value, misc] + response: FlowRun = await run_deployment( + name=workflow.full_name, poll_interval=1, parameters=parameters or {}, tags=tags + ) # type: ignore[return-value, misc] if not response.state: raise RuntimeError("Unable to read state from the response") @@ -56,9 +61,7 @@ async def execute_workflow( return await response.state.result(raise_on_failure=True, fetch=True) # type: ignore[call-overload] async def submit_workflow( - self, - workflow: WorkflowDefinition, - parameters: dict[str, Any] | None = None, + self, workflow: WorkflowDefinition, parameters: dict[str, Any] | None = None, tags: list[str] | None = None ) -> 
WorkflowInfo: - flow_run = await run_deployment(name=workflow.full_name, timeout=0, parameters=parameters or {}) # type: ignore[return-value, misc] + flow_run = await run_deployment(name=workflow.full_name, timeout=0, parameters=parameters or {}, tags=tags) # type: ignore[return-value, misc] return WorkflowInfo.from_flow(flow_run=flow_run) diff --git a/backend/infrahub/services/scheduler.py b/backend/infrahub/services/scheduler.py index 31662b49ec..7094383796 100644 --- a/backend/infrahub/services/scheduler.py +++ b/backend/infrahub/services/scheduler.py @@ -8,7 +8,7 @@ from infrahub import config from infrahub.components import ComponentType from infrahub.tasks.keepalive import refresh_heartbeat -from infrahub.tasks.recurring import push_telemetry, trigger_branch_refresh +from infrahub.tasks.recurring import trigger_branch_refresh if TYPE_CHECKING: from infrahub.services import InfrahubServices, ServiceFunction @@ -43,15 +43,6 @@ async def initialize(self, service: InfrahubServices) -> None: ] self.schedules.extend(schedules) - if not config.SETTINGS.main.telemetry_optout: - self.schedules.append( - Schedule( - name="push_telemetry", - interval=config.SETTINGS.main.telemetry_interval, - function=push_telemetry, - start_delay=3600, # Start pushing only if running for 1 hour - ) - ) if self.service.component_type == ComponentType.GIT_AGENT: schedules = [ Schedule(name="refresh_components", interval=10, function=refresh_heartbeat), diff --git a/backend/infrahub/tasks/artifact.py b/backend/infrahub/tasks/artifact.py index 71b6041af2..7c2ee8c4f6 100644 --- a/backend/infrahub/tasks/artifact.py +++ b/backend/infrahub/tasks/artifact.py @@ -1,15 +1,18 @@ from typing import Union from infrahub_sdk.node import InfrahubNode +from prefect import task from infrahub import lock from infrahub.core.constants import InfrahubKind +from infrahub.git.models import RequestArtifactGenerate from infrahub.message_bus import messages from infrahub.services import InfrahubServices +@task 
async def define_artifact( - message: Union[messages.CheckArtifactCreate, messages.RequestArtifactGenerate], service: InfrahubServices + message: Union[messages.CheckArtifactCreate, RequestArtifactGenerate], service: InfrahubServices ) -> InfrahubNode: if message.artifact_id: artifact = await service.client.get( diff --git a/backend/infrahub/tasks/recurring.py b/backend/infrahub/tasks/recurring.py index 6026c1243b..644deee2de 100644 --- a/backend/infrahub/tasks/recurring.py +++ b/backend/infrahub/tasks/recurring.py @@ -2,9 +2,6 @@ from typing import TYPE_CHECKING -from infrahub.message_bus import messages -from infrahub.worker import WORKER_IDENTITY - from .registry import refresh_branches if TYPE_CHECKING: @@ -17,10 +14,3 @@ async def trigger_branch_refresh(service: InfrahubServices) -> None: await refresh_branches(db=db) await service.component.refresh_schema_hash() - - -async def push_telemetry(service: InfrahubServices) -> None: - if await service.component.is_primary_api(): - service.log.debug(f"Primary identity matches my identity={WORKER_IDENTITY}. 
Pushing usage telemetry.") - message = messages.SendTelemetryPush() - await service.send(message=message) diff --git a/backend/infrahub/message_bus/operations/send/telemetry.py b/backend/infrahub/tasks/telemetry.py similarity index 79% rename from backend/infrahub/message_bus/operations/send/telemetry.py rename to backend/infrahub/tasks/telemetry.py index 4e6a36fa48..b481b2214a 100644 --- a/backend/infrahub/message_bus/operations/send/telemetry.py +++ b/backend/infrahub/tasks/telemetry.py @@ -12,8 +12,6 @@ from infrahub.core.branch import Branch from infrahub.core.constants import InfrahubKind from infrahub.core.graph.schema import GRAPH_SCHEMA -from infrahub.exceptions import HTTPServerError -from infrahub.message_bus import messages from infrahub.services import InfrahubServices, services TELEMETRY_KIND: str = "community" @@ -96,30 +94,6 @@ async def gather_anonymous_telemetry_data(service: InfrahubServices) -> dict: return data -@flow(name="telemetry-push-legacy") -async def push( - message: messages.SendTelemetryPush, # pylint: disable=unused-argument - service: InfrahubServices, -) -> None: - service.log.debug("Received telemetry push message...") - - data = await gather_anonymous_telemetry_data(service=service) - service.log.debug(f"Anonymous usage telemetry gathered in {data['execution_time']} seconds.") - - payload = { - "kind": TELEMETRY_KIND, - "payload_format": TELEMETRY_VERSION, - "data": data, - "checksum": hashlib.sha256(json.dumps(data).encode()).hexdigest(), - } - try: - response = await service.http.post(url=config.SETTINGS.main.telemetry_endpoint, json=payload) - except HTTPServerError as exc: - service.log.debug(f"HTTP exception while pushing anonymous telemetry: {exc}") - if not response.is_success: - service.log.debug("HTTP exception while pushing anonymous telemetry", status_code=response.status_code) - - @task(retries=5) async def post_telemetry_data(service: InfrahubServices, url: str, payload: dict[str, Any]) -> None: """Send the telemetry 
data to the specified URL, using HTTP POST.""" @@ -127,11 +101,14 @@ async def post_telemetry_data(service: InfrahubServices, url: str, payload: dict response.raise_for_status() -@flow +@flow(name="anonymous-telemetry-push") async def send_telemetry_push() -> None: service = services.service - log = get_run_logger() + if config.SETTINGS.main.telemetry_optout: + log.info("Skipping, User opted out of this service.") + return + log.info(f"Pushing anonymous telemetry data to {config.SETTINGS.main.telemetry_endpoint}...") data = await gather_anonymous_telemetry_data(service=service) diff --git a/backend/infrahub/trace.py b/backend/infrahub/trace.py index c332380b61..973dfaef72 100644 --- a/backend/infrahub/trace.py +++ b/backend/infrahub/trace.py @@ -55,7 +55,11 @@ def add_span_exception(exception: Exception) -> None: def create_tracer_provider( - service: str, version: str, exporter_type: str, exporter_endpoint: str = None, exporter_protocol: str = None + service: str, + version: str, + exporter_type: str, + exporter_endpoint: str | None = None, + exporter_protocol: str | None = None, ) -> TracerProvider: # Create a BatchSpanProcessor exporter based on the type if exporter_type == "console": @@ -91,7 +95,11 @@ def create_tracer_provider( def configure_trace( - service: str, version: str, exporter_type: str, exporter_endpoint: str | None = None, exporter_protocol: str = None + service: str, + version: str, + exporter_type: str, + exporter_endpoint: str | None = None, + exporter_protocol: str | None = None, ) -> None: # Create a trace provider with the exporter tracer_provider = create_tracer_provider( diff --git a/backend/infrahub/transformations/tasks.py b/backend/infrahub/transformations/tasks.py index e45cfb0198..a0ef7899ac 100644 --- a/backend/infrahub/transformations/tasks.py +++ b/backend/infrahub/transformations/tasks.py @@ -5,6 +5,7 @@ from infrahub.git.repository import get_initialized_repo from infrahub.log import get_logger from infrahub.services import 
services +from infrahub.workflows.utils import add_branch_tag from .models import TransformJinjaTemplateData, TransformPythonData @@ -14,6 +15,7 @@ @flow(name="transform-render-python") async def transform_python(message: TransformPythonData) -> Any: service = services.service + await add_branch_tag(branch_name=message.branch) repo = await get_initialized_repo( repository_id=message.repository_id, @@ -36,6 +38,7 @@ async def transform_python(message: TransformPythonData) -> Any: @flow(name="transform-render-jinja2") async def transform_render_jinja2_template(message: TransformJinjaTemplateData) -> str: service = services.service + await add_branch_tag(branch_name=message.branch) repo = await get_initialized_repo( repository_id=message.repository_id, diff --git a/backend/infrahub/workers/infrahub_async.py b/backend/infrahub/workers/infrahub_async.py index 3ce6307e05..2ffe2ca11d 100644 --- a/backend/infrahub/workers/infrahub_async.py +++ b/backend/infrahub/workers/infrahub_async.py @@ -23,10 +23,13 @@ from infrahub.git import initialize_repositories_directory from infrahub.lock import initialize_lock from infrahub.services import InfrahubServices, services +from infrahub.services.adapters.cache import InfrahubCache from infrahub.services.adapters.cache.nats import NATSCache from infrahub.services.adapters.cache.redis import RedisCache +from infrahub.services.adapters.message_bus import InfrahubMessageBus from infrahub.services.adapters.message_bus.nats import NATSMessageBus from infrahub.services.adapters.message_bus.rabbitmq import RabbitMQMessageBus +from infrahub.services.adapters.workflow import InfrahubWorkflow from infrahub.services.adapters.workflow.local import WorkflowLocalExecution from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution from infrahub.workflows.models import TASK_RESULT_STORAGE_NAME @@ -99,44 +102,8 @@ async def setup( ) ) - if not client: - self._logger.debug(f"Using Infrahub API at 
{config.SETTINGS.main.internal_address}") - client = InfrahubClient( - config=Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True, log=self._logger) - ) - - try: - await client.branch.all() - except SdkError as exc: - self._logger.error(f"Error in communication with Infrahub: {exc.message}") - raise typer.Exit(1) - - database = InfrahubDatabase(driver=await get_db(retry=1)) - - workflow = config.OVERRIDE.workflow or ( - WorkflowWorkerExecution() - if config.SETTINGS.workflow.driver == config.WorkflowDriver.WORKER - else WorkflowLocalExecution() - ) - - message_bus = config.OVERRIDE.message_bus or ( - NATSMessageBus() if config.SETTINGS.broker.driver == config.BrokerDriver.NATS else RabbitMQMessageBus() - ) - cache = config.OVERRIDE.cache or ( - NATSCache() if config.SETTINGS.cache.driver == config.CacheDriver.NATS else RedisCache() - ) - - service = InfrahubServices( - cache=cache, - client=client, - database=database, - message_bus=message_bus, - workflow=workflow, - component_type=ComponentType.GIT_AGENT, - ) - services.service = service - - await service.initialize() + client = await self._init_infrahub_client(client=client) + service = await self._init_services(client=client) if not registry.schema_has_been_initialized(): initialize_lock(service=service) @@ -178,3 +145,58 @@ async def run( status_code=0, identifier=str(flow_run.id), ) + + async def _init_infrahub_client(self, client: InfrahubClient | None = None) -> InfrahubClient: + if not client: + self._logger.debug(f"Using Infrahub API at {config.SETTINGS.main.internal_address}") + client = InfrahubClient( + config=Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True, log=self._logger) + ) + + try: + await client.branch.all() + except SdkError as exc: + self._logger.error(f"Error in communication with Infrahub: {exc.message}") + raise typer.Exit(1) + + return client + + async def _init_database(self) -> InfrahubDatabase: + return InfrahubDatabase(driver=await 
get_db(retry=1)) + + async def _init_workflow(self) -> InfrahubWorkflow: + return config.OVERRIDE.workflow or ( + WorkflowWorkerExecution() + if config.SETTINGS.workflow.driver == config.WorkflowDriver.WORKER + else WorkflowLocalExecution() + ) + + async def _init_message_bus(self) -> InfrahubMessageBus: + return config.OVERRIDE.message_bus or ( + NATSMessageBus() if config.SETTINGS.broker.driver == config.BrokerDriver.NATS else RabbitMQMessageBus() + ) + + async def _init_cache(self) -> InfrahubCache: + return config.OVERRIDE.cache or ( + NATSCache() if config.SETTINGS.cache.driver == config.CacheDriver.NATS else RedisCache() + ) + + async def _init_services(self, client: InfrahubClient) -> InfrahubServices: + database = await self._init_database() + workflow = await self._init_workflow() + message_bus = await self._init_message_bus() + cache = await self._init_cache() + + service = InfrahubServices( + cache=cache, + client=client, + database=database, + message_bus=message_bus, + workflow=workflow, + component_type=ComponentType.GIT_AGENT, + ) + + services.service = service + await service.initialize() + + return service diff --git a/backend/infrahub/workflows/catalogue.py b/backend/infrahub/workflows/catalogue.py index a6c469a4d4..856f89a5f4 100644 --- a/backend/infrahub/workflows/catalogue.py +++ b/backend/infrahub/workflows/catalogue.py @@ -1,4 +1,6 @@ -from .constants import WorkflowType +from infrahub.core.constants import BranchSupportType + +from .constants import WorkflowTag, WorkflowType from .models import WorkerPoolDefinition, WorkflowDefinition INFRAHUB_WORKER_POOL = WorkerPoolDefinition( @@ -6,9 +8,9 @@ ) WEBHOOK_SEND = WorkflowDefinition( - name="webhook_send", + name="event-send-webhook", type=WorkflowType.USER, - module="infrahub.message_bus.operations.send.webhook", + module="infrahub.send.webhook", function="send_webhook", ) @@ -17,6 +19,7 @@ type=WorkflowType.USER, module="infrahub.transformations.tasks", 
function="transform_render_jinja2_template", + branch_support=BranchSupportType.AWARE, ) TRANSFORM_PYTHON_RENDER = WorkflowDefinition( @@ -24,13 +27,14 @@ type=WorkflowType.USER, module="infrahub.transformations.tasks", function="transform_python", + branch_support=BranchSupportType.AWARE, ) ANONYMOUS_TELEMETRY_SEND = WorkflowDefinition( name="anonymous_telemetry_send", type=WorkflowType.INTERNAL, cron="0 2 * * *", - module="infrahub.message_bus.operations.send.telemetry", + module="infrahub.tasks.telemetry", function="send_telemetry_push", ) @@ -39,6 +43,8 @@ type=WorkflowType.INTERNAL, module="infrahub.core.migrations.schema.tasks", function="schema_apply_migrations", + branch_support=BranchSupportType.AWARE, + tags=[WorkflowTag.DATABASE_CHANGE], ) SCHEMA_VALIDATE_MIGRATION = WorkflowDefinition( @@ -46,6 +52,14 @@ type=WorkflowType.INTERNAL, module="infrahub.core.validators.tasks", function="schema_validate_migrations", + branch_support=BranchSupportType.AWARE, +) + +TRIGGER_ARTIFACT_DEFINITION_GENERATE = WorkflowDefinition( + name="artifact-definition-generate", + type=WorkflowType.INTERNAL, + module="infrahub.git.tasks", + function="generate_artifact_definition", ) IPAM_RECONCILIATION = WorkflowDefinition( @@ -53,12 +67,49 @@ type=WorkflowType.INTERNAL, module="infrahub.core.ipam.tasks", function="ipam_reconciliation", + branch_support=BranchSupportType.AWARE, + tags=[WorkflowTag.DATABASE_CHANGE], +) + +REQUEST_GENERATOR_RUN = WorkflowDefinition( + name="generator-run", + type=WorkflowType.INTERNAL, + module="infrahub.generators.tasks", + function="run_generator", +) + +REQUEST_ARTIFACT_GENERATE = WorkflowDefinition( + name="artifact-generate", + type=WorkflowType.INTERNAL, + module="infrahub.git.tasks", + function="generate_artifact", +) + +REQUEST_ARTIFACT_DEFINITION_GENERATE = WorkflowDefinition( + name="artifact-definition-generate", + type=WorkflowType.INTERNAL, + module="infrahub.git.tasks", + function="generate_request_artifact_definition", +) + 
+REQUEST_DIFF_UPDATE = WorkflowDefinition( + name="diff-update", + type=WorkflowType.INTERNAL, + module="infrahub.core.diff.tasks", + function="update_diff", +) + +REQUEST_DIFF_REFRESH = WorkflowDefinition( + name="diff-refresh", + type=WorkflowType.INTERNAL, + module="infrahub.core.diff.tasks", + function="refresh_diff", ) GIT_REPOSITORIES_SYNC = WorkflowDefinition( name="git_repositories_sync", type=WorkflowType.INTERNAL, - cron="*/10 * * * *", + cron="* * * * *", module="infrahub.git.tasks", function="sync_remote_repositories", ) @@ -68,6 +119,25 @@ type=WorkflowType.INTERNAL, module="infrahub.git.tasks", function="create_branch", + branch_support=BranchSupportType.AWARE, + tags=[WorkflowTag.DATABASE_CHANGE], +) +BRANCH_REBASE = WorkflowDefinition( + name="branch-rebase", + type=WorkflowType.INTERNAL, + module="infrahub.core.branch.tasks", + function="rebase_branch", + branch_support=BranchSupportType.AWARE, + tags=[WorkflowTag.DATABASE_CHANGE], +) + +BRANCH_MERGE = WorkflowDefinition( + name="branch-merge", + type=WorkflowType.INTERNAL, + module="infrahub.core.branch.tasks", + function="merge_branch", + branch_support=BranchSupportType.AWARE, + tags=[WorkflowTag.DATABASE_CHANGE], ) worker_pools = [INFRAHUB_WORKER_POOL] @@ -79,7 +149,15 @@ ANONYMOUS_TELEMETRY_SEND, SCHEMA_APPLY_MIGRATION, SCHEMA_VALIDATE_MIGRATION, + TRIGGER_ARTIFACT_DEFINITION_GENERATE, IPAM_RECONCILIATION, GIT_REPOSITORIES_SYNC, GIT_REPOSITORIES_CREATE_BRANCH, + REQUEST_ARTIFACT_GENERATE, + BRANCH_REBASE, + BRANCH_MERGE, + REQUEST_ARTIFACT_DEFINITION_GENERATE, + REQUEST_GENERATOR_RUN, + REQUEST_DIFF_UPDATE, + REQUEST_DIFF_REFRESH, ] diff --git a/backend/infrahub/workflows/constants.py b/backend/infrahub/workflows/constants.py index ff650c935d..1597b2a1cb 100644 --- a/backend/infrahub/workflows/constants.py +++ b/backend/infrahub/workflows/constants.py @@ -4,3 +4,18 @@ class WorkflowType(InfrahubStringEnum): INTERNAL = "internal" USER = "user" + + +TAG_NAMESPACE = "infrahub.app" + + +class 
WorkflowTag(InfrahubStringEnum): + BRANCH = "branch/{identifier}" + WORKFLOWTYPE = "workflow-type/{identifier}" + DATABASE_CHANGE = "database-change" + + def render(self, identifier: str | None = None) -> str: + if identifier is None: + return f"{TAG_NAMESPACE}/{self.value}" + rendered_value = str(self.value).format(identifier=identifier) + return f"{TAG_NAMESPACE}/{rendered_value}" diff --git a/backend/infrahub/workflows/models.py b/backend/infrahub/workflows/models.py index a17bc39e2b..cde6e63320 100644 --- a/backend/infrahub/workflows/models.py +++ b/backend/infrahub/workflows/models.py @@ -6,12 +6,13 @@ from prefect.client.schemas.actions import DeploymentScheduleCreate from prefect.client.schemas.objects import FlowRun from prefect.client.schemas.schedules import CronSchedule -from pydantic import BaseModel +from pydantic import BaseModel, Field from typing_extensions import Self from infrahub import __version__ +from infrahub.core.constants import BranchSupportType -from .constants import WorkflowType +from .constants import WorkflowTag, WorkflowType TASK_RESULT_STORAGE_NAME = "infrahub-storage" @@ -39,6 +40,8 @@ class WorkflowDefinition(BaseModel): module: str function: str cron: str | None = None + branch_support: BranchSupportType = BranchSupportType.AGNOSTIC + tags: list[WorkflowTag] = Field(default_factory=list) @property def entrypoint(self) -> str: @@ -49,16 +52,19 @@ def full_name(self) -> str: return f"{self.name}/{self.name}" def to_deployment(self) -> dict[str, Any]: - payload: dict[str, Any] = { - "name": self.name, - "entrypoint": self.entrypoint, - } + payload: dict[str, Any] = {"name": self.name, "entrypoint": self.entrypoint, "tags": self.get_tags()} if self.type == WorkflowType.INTERNAL: payload["version"] = __version__ if self.cron: payload["schedules"] = [DeploymentScheduleCreate(schedule=CronSchedule(cron=self.cron))] + return payload + def get_tags(self) -> list[str]: + tags: list[str] = 
[WorkflowTag.WORKFLOWTYPE.render(identifier=self.type.value)] + tags += [tag.render() for tag in self.tags] + return tags + async def save(self, client: PrefectClient, work_pool: WorkerPoolDefinition) -> UUID: flow_id = await client.create_flow_from_name(self.name) data = self.to_deployment() diff --git a/backend/infrahub/workflows/utils.py b/backend/infrahub/workflows/utils.py new file mode 100644 index 0000000000..b32e0676fc --- /dev/null +++ b/backend/infrahub/workflows/utils.py @@ -0,0 +1,13 @@ +from prefect import get_client, task +from prefect.runtime import flow_run + +from .constants import WorkflowTag + + +@task(name="add-branch-tag") +async def add_branch_tag(branch_name: str) -> None: + client = get_client(sync_client=False) + current_flow_run_id = flow_run.id + current_tags: list[str] = flow_run.tags + new_tags = current_tags + [WorkflowTag.BRANCH.render(identifier=branch_name)] + await client.update_flow_run(current_flow_run_id, tags=list(new_tags)) diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 354032868c..3b86ed5b6e 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -379,6 +379,7 @@ def reload_settings_before_each_module(tmpdir_factory): # Other settings config.SETTINGS.storage.driver = config.StorageDriver.FileSystemStorage + config.SETTINGS.workflow.driver = config.WorkflowDriver.LOCAL storage_dir = tmpdir_factory.mktemp("storage") config.SETTINGS.storage.local.path_ = str(storage_dir) diff --git a/backend/tests/helpers/graphql.py b/backend/tests/helpers/graphql.py index 805991a8ad..13be523a9f 100644 --- a/backend/tests/helpers/graphql.py +++ b/backend/tests/helpers/graphql.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from graphql import ExecutionResult, graphql @@ -16,10 +16,10 @@ async def graphql_mutation( query: str, db: InfrahubDatabase, - branch: Optional[Branch] = None, - variables: 
Optional[dict[str, Any]] = None, - service: Optional[InfrahubServices] = None, - account_session: Optional[AccountSession] = None, + branch: Branch | None = None, + variables: dict[str, Any] | None = None, + service: InfrahubServices | None = None, + account_session: AccountSession | None = None, ) -> ExecutionResult: branch = branch or await Branch.get_by_name(name="main", db=db) service = service or services.service @@ -45,8 +45,8 @@ async def graphql_query( query: str, db: InfrahubDatabase, branch: Branch, - variables: Optional[dict[str, Any]] = None, - service: Optional[InfrahubServices] = None, + variables: dict[str, Any] | None = None, + service: InfrahubServices | None = None, ) -> ExecutionResult: service = service or services.service diff --git a/backend/tests/helpers/query_benchmark/db_query_profiler.py b/backend/tests/helpers/query_benchmark/db_query_profiler.py index cfdd435d07..a8bb07b516 100644 --- a/backend/tests/helpers/query_benchmark/db_query_profiler.py +++ b/backend/tests/helpers/query_benchmark/db_query_profiler.py @@ -110,7 +110,11 @@ def get_context(self) -> dict[str, Any]: return ctx async def execute_query_with_metadata( - self, query: str, params: dict[str, Any] | None = None, name: str | None = "undefined" + self, + query: str, + params: dict[str, Any] | None = None, + name: str = "undefined", + context: dict[str, str] | None = None, ) -> tuple[list[Record], dict[str, Any]]: if not self.profiling_enabled: # Profiling might be disabled to avoid capturing queries while loading data diff --git a/backend/tests/helpers/schema/__init__.py b/backend/tests/helpers/schema/__init__.py index 36dd846665..a066165800 100644 --- a/backend/tests/helpers/schema/__init__.py +++ b/backend/tests/helpers/schema/__init__.py @@ -9,6 +9,7 @@ from .manufacturer import MANUFACTURER from .person import PERSON from .ticket import TICKET +from .widget import WIDGET if TYPE_CHECKING: from infrahub.database import InfrahubDatabase @@ -29,4 +30,4 @@ async def 
load_schema(db: InfrahubDatabase, schema: SchemaRoot, branch_name: str ) -__all__ = ["CAR", "CAR_SCHEMA", "MANUFACTURER", "PERSON", "TICKET"] +__all__ = ["CAR", "CAR_SCHEMA", "MANUFACTURER", "PERSON", "TICKET", "WIDGET"] diff --git a/backend/tests/helpers/schema/widget.py b/backend/tests/helpers/schema/widget.py new file mode 100644 index 0000000000..82c2180ec5 --- /dev/null +++ b/backend/tests/helpers/schema/widget.py @@ -0,0 +1,16 @@ +from infrahub.core.schema import AttributeSchema, NodeSchema + +WIDGET = NodeSchema( + name="Widget", + namespace="Testing", + label="Widget", + default_filter="name__value", + order_by=["name__value"], + attributes=[ + AttributeSchema(name="name", kind="Text", optional=False), + AttributeSchema(name="description", kind="Text", optional=True), + AttributeSchema(name="height", kind="Number", optional=True), + AttributeSchema(name="weight", kind="Number", optional=True), + ], + inherit_from=["LineageOwner", "LineageSource"], +) diff --git a/backend/tests/helpers/test_app.py b/backend/tests/helpers/test_app.py index 24027e5934..6dc9a6339e 100644 --- a/backend/tests/helpers/test_app.py +++ b/backend/tests/helpers/test_app.py @@ -23,6 +23,7 @@ from infrahub.core.utils import delete_all_nodes from infrahub.database import InfrahubDatabase from infrahub.server import app, app_initialization +from infrahub.services import services from infrahub.services.adapters.workflow.local import WorkflowLocalExecution from tests.adapters.message_bus import BusSimulator @@ -95,6 +96,9 @@ async def register_core_schema( async def test_client( self, initialize_registry: None, redis: dict[int, int] | None, nats: dict[int, int] | None ) -> InfrahubTestClient: + # This call emits an ERROR because it calls registry-webhook-config-refresh flow within a local worker + # while services.service.client is not set. There might be a design issue here: a client is needed while + # the app is being initialized. 
await app_initialization(app) return InfrahubTestClient(app=app) @@ -110,7 +114,17 @@ async def client( bus_simulator.service._client = sdk_client - return sdk_client + # Some tests rely on infrahub worker which runs locally during testing. Thus, code supposed to run + # on worker rely on server's `services.service`, which is not initialized with a client, + # instead of the worker one. Thus, we temporarily set `services.service.client` + # here to mock worker's `services.service`. + assert isinstance( + services.service.workflow, WorkflowLocalExecution + ), "These tests are currently meant to run with a local worker" + original_service_client = services.service._client + services.service.set_client(sdk_client) + yield sdk_client + services.service.set_client(original_service_client) @pytest.fixture(scope="class") async def initialize_registry( @@ -122,4 +136,5 @@ async def initialize_registry( administrator_role = await create_super_administrator_role(db=db) await create_super_administrators_group(db=db, role=administrator_role, admin_accounts=[admin_account]) + # This call emits a warning related to the fact database index manager has not been initialized. 
await initialization(db=db) diff --git a/backend/tests/integration/diff/test_diff_merge.py b/backend/tests/integration/diff/test_diff_merge.py index 64a6803a88..44422e3249 100644 --- a/backend/tests/integration/diff/test_diff_merge.py +++ b/backend/tests/integration/diff/test_diff_merge.py @@ -117,9 +117,9 @@ async def test_select_cardinality_one_resolution_and_merge( marty_id = initial_dataset["marty"].get_id() enriched_diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=diff_branch) - conflicts = enriched_diff.get_all_conflicts() - assert len(conflicts) == 1 - owner_conflict = conflicts[0] + conflicts_map = enriched_diff.get_all_conflicts() + assert len(conflicts_map) == 1 + owner_conflict = list(conflicts_map.values())[0] await diff_repository.update_conflict_by_id( conflict_id=owner_conflict.uuid, selection=ConflictSelection.BASE_BRANCH ) diff --git a/backend/tests/integration/diff/test_diff_rebase.py b/backend/tests/integration/diff/test_diff_rebase.py index 312c7a365e..411e1c65b1 100644 --- a/backend/tests/integration/diff/test_diff_rebase.py +++ b/backend/tests/integration/diff/test_diff_rebase.py @@ -7,7 +7,7 @@ from infrahub import config, lock from infrahub.core.constants import DiffAction, InfrahubKind from infrahub.core.constants.database import DatabaseEdgeType -from infrahub.core.diff.model.path import BranchTrackingId, ConflictSelection +from infrahub.core.diff.model.path import BranchTrackingId from infrahub.core.diff.repository.repository import DiffRepository from infrahub.core.initialization import create_branch from infrahub.core.manager import NodeManager @@ -297,6 +297,7 @@ async def test_merge_causes_diff_update( cyberdyne_id = initial_dataset["cyberdyne"].id omnicorp_id = initial_dataset["omnicorp"].id before_merge = Timestamp() + result = await client.execute_graphql(query=BRANCH_MERGE, variables={"branch": branch_1.name}) assert result["BranchMerge"]["ok"] @@ -370,41 +371,22 @@ async def 
test_merge_causes_diff_update( assert prop_diff.conflict is None async def test_resolve_conflict( - self, client: InfrahubClient, branch_2: Branch, diff_repository: DiffRepository, initial_dataset + self, + db: InfrahubDatabase, + branch_2: Branch, + initial_dataset, ): + kara_id = initial_dataset["kara"].id + jesko_id = initial_dataset["jesko"].id cyberdyne_id = initial_dataset["cyberdyne"].id - branch_2_diff = await diff_repository.get_one( - diff_branch_name=branch_2.name, tracking_id=BranchTrackingId(name=branch_2.name) - ) - conflicts = branch_2_diff.get_all_conflicts() - attr_conflict = None - peer_conflict = None - for conflict in conflicts: - if conflict.base_branch_value == "branch-1-description": - attr_conflict = conflict - elif conflict.base_branch_value == cyberdyne_id: - peer_conflict = conflict - assert attr_conflict - assert peer_conflict - - result = await client.execute_graphql( - query=CONFLICT_SELECTION_QUERY, - variables={"conflict_id": attr_conflict.uuid, "selected_branch": ConflictSelection.DIFF_BRANCH.name}, - ) - assert result["ResolveDiffConflict"]["ok"] - result = await client.execute_graphql( - query=CONFLICT_SELECTION_QUERY, - variables={"conflict_id": peer_conflict.uuid, "selected_branch": ConflictSelection.BASE_BRANCH.name}, - ) - assert result["ResolveDiffConflict"]["ok"] - branch_2_diff = await diff_repository.get_one( - diff_branch_name=branch_2.name, tracking_id=BranchTrackingId(name=branch_2.name) - ) - updated_conflicts = branch_2_diff.get_all_conflicts() - conflicts_by_id = {c.uuid: c for c in updated_conflicts} - assert conflicts_by_id[attr_conflict.uuid].selected_branch is ConflictSelection.DIFF_BRANCH - assert conflicts_by_id[peer_conflict.uuid].selected_branch is ConflictSelection.BASE_BRANCH + kara_main = await NodeManager.get_one(db=db, id=kara_id) + kara_main.description.value = "branch-2-description" + await kara_main.save(db=db) + + jesko_branch = await NodeManager.get_one(db=db, branch=branch_2, id=jesko_id) + await 
jesko_branch.manufacturer.update(db=db, data=cyberdyne_id) + await jesko_branch.save(db=db) async def test_rebase_causes_diff_recalculation( self, @@ -414,11 +396,11 @@ async def test_rebase_causes_diff_recalculation( branch_2: Branch, diff_repository: DiffRepository, ): - kara_id = initial_dataset["kara"].id jesko_id = initial_dataset["jesko"].id koenigsegg_id = initial_dataset["koenigsegg"].id - omnicorp_id = initial_dataset["omnicorp"].id + cyberdyne_id = initial_dataset["cyberdyne"].id before_rebase = Timestamp() + result = await client.execute_graphql(query=BRANCH_REBASE, variables={"branch": branch_2.name}) assert result["BranchRebase"]["ok"] @@ -426,26 +408,17 @@ async def test_rebase_causes_diff_recalculation( diff_branch_name=branch_2.name, tracking_id=BranchTrackingId(name=branch_2.name) ) - assert len(branch_2_diff.nodes) == 4 + assert len(branch_2_diff.nodes) == 3 assert branch_2_diff.to_time > before_rebase nodes_by_id = {n.uuid: n for n in branch_2_diff.nodes} - kara_node = nodes_by_id[kara_id] - assert len(kara_node.attributes) == 1 - description_attr = kara_node.attributes.pop() - assert description_attr.name == "description" - assert len(description_attr.properties) == 1 - value_prop = description_attr.properties.pop() - assert value_prop.property_type is DatabaseEdgeType.HAS_VALUE - assert value_prop.previous_value == "branch-1-description" - assert value_prop.new_value == "branch-2-description" - assert value_prop.conflict is None + assert set(nodes_by_id.keys()) == {jesko_id, cyberdyne_id, koenigsegg_id} jesko_node = nodes_by_id[jesko_id] assert len(jesko_node.relationships) == 1 manufacturer_rel = jesko_node.relationships.pop() assert manufacturer_rel.name == "manufacturer" assert len(manufacturer_rel.relationships) == 1 manufacturer_element = manufacturer_rel.relationships.pop() - assert manufacturer_element.peer_id == omnicorp_id + assert manufacturer_element.peer_id == cyberdyne_id assert manufacturer_element.action is DiffAction.UPDATED 
assert manufacturer_element.conflict is None assert len(manufacturer_element.properties) == 1 @@ -453,9 +426,9 @@ async def test_rebase_causes_diff_recalculation( assert related_prop.property_type is DatabaseEdgeType.IS_RELATED assert related_prop.action is DiffAction.UPDATED assert related_prop.previous_value == koenigsegg_id - assert related_prop.new_value == omnicorp_id + assert related_prop.new_value == cyberdyne_id assert related_prop.conflict is None - for manufacturer_id, expected_action in ((koenigsegg_id, DiffAction.REMOVED), (omnicorp_id, DiffAction.ADDED)): + for manufacturer_id, expected_action in ((koenigsegg_id, DiffAction.REMOVED), (cyberdyne_id, DiffAction.ADDED)): manufacturer_node = nodes_by_id[manufacturer_id] assert len(manufacturer_node.relationships) == 1 cars_rel = manufacturer_node.relationships.pop() diff --git a/backend/tests/integration/git/test_readonly_repository.py b/backend/tests/integration/git/test_readonly_repository.py index 154050a657..b700dbf287 100644 --- a/backend/tests/integration/git/test_readonly_repository.py +++ b/backend/tests/integration/git/test_readonly_repository.py @@ -8,9 +8,10 @@ from infrahub.core.constants import InfrahubKind from infrahub.core.manager import NodeManager from infrahub.core.node import Node +from infrahub.git.models import RequestArtifactDefinitionGenerate from infrahub.lock import InfrahubLockRegistry -from infrahub.message_bus.messages import RequestArtifactDefinitionGenerate -from infrahub.services import InfrahubServices +from infrahub.services import services +from infrahub.workflows.catalogue import REQUEST_ARTIFACT_DEFINITION_GENERATE from tests.constants import TestKind from tests.helpers.file_repo import FileRepo from tests.helpers.schema import CAR_SCHEMA, load_schema @@ -83,12 +84,21 @@ async def test_step01_create_repository( assert repository.commit.value assert check_definition.file_path.value == "checks/car_overview.py" - async def test_step02_validate_generated_artifacts(self, db: 
InfrahubDatabase, client: InfrahubClient): + async def test_step02_validate_generated_artifacts( + self, + db: InfrahubDatabase, + client: InfrahubClient, + ): artifacts = await client.all(kind=InfrahubKind.ARTIFACT, branch="ro_repository") assert artifacts assert artifacts[0].name.value == "Ownership report" - async def test_step03_merge_branch(self, db: InfrahubDatabase, client: InfrahubClient, helper: TestHelper): + async def test_step03_merge_branch( + self, + db: InfrahubDatabase, + client: InfrahubClient, + helper: TestHelper, + ): await client.branch.merge(branch_name="ro_repository") check_definition: CoreCheckDefinition = await NodeManager.get_one_by_id_or_default_filter( @@ -96,14 +106,12 @@ async def test_step03_merge_branch(self, db: InfrahubDatabase, client: InfrahubC ) assert check_definition.file_path.value == "checks/car_overview.py" - bus_simulator = helper.get_message_bus_simulator() - service = InfrahubServices(client=client, message_bus=bus_simulator) - bus_simulator.service = service - artifact_definitions = await client.all(kind=InfrahubKind.ARTIFACTDEFINITION) + for artifact_definition in artifact_definitions: - await service.send( - message=RequestArtifactDefinitionGenerate(artifact_definition=artifact_definition.id, branch="main") + model = RequestArtifactDefinitionGenerate(artifact_definition=artifact_definition.id, branch="main") + await services.service.workflow.submit_workflow( + REQUEST_ARTIFACT_DEFINITION_GENERATE, parameters={"model": model} ) artifacts = await client.all(kind=InfrahubKind.ARTIFACT) diff --git a/backend/tests/integration/ipam/test_ipam_merge_reconcile.py b/backend/tests/integration/ipam/test_ipam_merge_reconcile.py index c6738583ca..59497dd2d0 100644 --- a/backend/tests/integration/ipam/test_ipam_merge_reconcile.py +++ b/backend/tests/integration/ipam/test_ipam_merge_reconcile.py @@ -40,7 +40,12 @@ async def branch_2(self, db: InfrahubDatabase): return await create_branch(db=db, branch_name="delete_prefix") async def 
test_step01_add_address( - self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_1, new_address_1 + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + branch_1, + new_address_1, ) -> None: success = await client.branch.merge(branch_name=branch_1.name) assert success is True @@ -51,7 +56,12 @@ async def test_step01_add_address( assert parent_rels[0].peer_id == initial_dataset["net140"].id async def test_step02_add_delete_prefix( - self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_2, new_address_1 + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + branch_2, + new_address_1, ) -> None: prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch_2) new_prefix = await Node.init(schema=prefix_schema, db=db, branch=registry.default_branch) diff --git a/backend/tests/integration/ipam/test_ipam_utilization.py b/backend/tests/integration/ipam/test_ipam_utilization.py index 742f628311..9ff0257a27 100644 --- a/backend/tests/integration/ipam/test_ipam_utilization.py +++ b/backend/tests/integration/ipam/test_ipam_utilization.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING import pytest from graphql import graphql @@ -63,7 +63,7 @@ async def initial_dataset( db: InfrahubDatabase, initialize_registry: None, register_ipam_schema, - ) -> dict[str, Union[Node, list[Node]]]: + ) -> dict[str, Node | list[Node]]: await create_ipam_namespace(db=db) default_ipnamespace = await get_default_ipnamespace(db=db) default_branch = registry.default_branch @@ -119,9 +119,7 @@ async def branch2(self, db: InfrahubDatabase) -> Branch: return await create_branch(db=db, branch_name="branch2") @pytest.fixture(scope="class") - async def step_02_dataset( - self, db: InfrahubDatabase, initial_dataset, branch2 - ) -> dict[str, Union[Node, list[Node]]]: + async def step_02_dataset(self, db: 
InfrahubDatabase, initial_dataset, branch2) -> dict[str, Node | list[Node]]: prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch2) address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=branch2) container = initial_dataset["container"] diff --git a/backend/tests/integration/ipam/test_proposed_change_reconcile.py b/backend/tests/integration/ipam/test_proposed_change_reconcile.py index aa8d8f3fc6..0aac243a5a 100644 --- a/backend/tests/integration/ipam/test_proposed_change_reconcile.py +++ b/backend/tests/integration/ipam/test_proposed_change_reconcile.py @@ -51,7 +51,12 @@ async def branch_2(self, db: InfrahubDatabase): return await create_branch(db=db, branch_name="delete_prefix") async def test_step01_add_address( - self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_1, new_address_1 + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + branch_1, + new_address_1, ) -> None: proposed_change_create = await client.create( kind=InfrahubKind.PROPOSEDCHANGE, @@ -67,7 +72,12 @@ async def test_step01_add_address( assert parent_rels[0].peer_id == initial_dataset["net140"].id async def test_step02_add_delete_prefix( - self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_2, new_address_1 + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + branch_2, + new_address_1, ) -> None: proposed_change_create = await client.create( kind=InfrahubKind.PROPOSEDCHANGE, diff --git a/backend/tests/integration/proposed_change/test_proposed_change_repository.py b/backend/tests/integration/proposed_change/test_proposed_change_repository.py index 592deb5168..4bfd6c8cad 100644 --- a/backend/tests/integration/proposed_change/test_proposed_change_repository.py +++ b/backend/tests/integration/proposed_change/test_proposed_change_repository.py @@ -72,7 +72,10 @@ async def initial_dataset( await richard.save(db=db) async def 
test_create_proposed_change( - self, db: InfrahubDatabase, initial_dataset: None, client: InfrahubClient + self, + db: InfrahubDatabase, + initial_dataset: None, + client: InfrahubClient, ) -> None: proposed_change_create = await client.create( kind=InfrahubKind.PROPOSEDCHANGE, diff --git a/backend/tests/integration/schema_lifecycle/shared.py b/backend/tests/integration/schema_lifecycle/shared.py index ca43eb61de..a5fc2fc843 100644 --- a/backend/tests/integration/schema_lifecycle/shared.py +++ b/backend/tests/integration/schema_lifecycle/shared.py @@ -1,3 +1,4 @@ +import copy from typing import Any, Dict import pytest @@ -34,13 +35,13 @@ def schema_person_02_first_last(self, schema_person_base) -> Dict[str, Any]: """Rename the attribute name to firstname and add a new lastname attribute.""" assert schema_person_base["attributes"][0]["name"] == "name" schema_person_base["attributes"][0]["name"] = "firstname" - schema_person_base["attributes"].append({"name": "lastname", "kind": "Text"}) + schema_person_base["attributes"].append({"name": "lastname", "kind": "Text", "optional": True}) return schema_person_base @pytest.fixture(scope="class") def schema_person_03_no_height(self, schema_person_02_first_last) -> Dict[str, Any]: """Remove the attribute height.""" - person = schema_person_02_first_last + person = copy.deepcopy(schema_person_02_first_last) assert person["attributes"][2]["name"] == "height" person["attributes"][2]["state"] = "absent" return person diff --git a/backend/tests/integration/schema_lifecycle/test_schema_attribute_remove_add.py b/backend/tests/integration/schema_lifecycle/test_schema_attribute_remove_add.py new file mode 100644 index 0000000000..07d14e3e0d --- /dev/null +++ b/backend/tests/integration/schema_lifecycle/test_schema_attribute_remove_add.py @@ -0,0 +1,211 @@ +from typing import Any, Optional + +import pytest +from infrahub_sdk import InfrahubClient + +from infrahub.core import registry +from infrahub.core.branch import Branch +from 
infrahub.core.node import Node +from infrahub.database import InfrahubDatabase +from infrahub.exceptions import InitializationError + +from ..shared import load_schema +from .shared import ( + CAR_KIND, + MANUFACTURER_KIND_01, + PERSON_KIND, + TAG_KIND, + TestSchemaLifecycleBase, +) + +# pylint: disable=unused-argument + + +class BranchState: + def __init__(self) -> None: + self._branch: Optional[Branch] = None + + @property + def branch(self) -> Branch: + if self._branch: + return self._branch + raise InitializationError + + @branch.setter + def branch(self, value: Branch) -> None: + self._branch = value + + +state = BranchState() + + +# --------------------------------- +# This test was initially written to troubleshoot and fix https://github.com/opsmill/infrahub/issues/4727 +# The issue was primarily happening in Main +# --------------------------------- +class TestSchemaLifecycleAttributeRemoveAddMain(TestSchemaLifecycleBase): + @property + def branch1(self) -> Branch: + return state.branch + + @pytest.fixture(scope="class") + async def initial_dataset(self, db: InfrahubDatabase, initialize_registry, schema_step01): + await load_schema(db=db, schema=schema_step01) + + # Load data in the MAIN branch first + john = await Node.init(schema=PERSON_KIND, db=db) + await john.new(db=db, firstname="John", lastname="Doe", height=175, description="The famous Joe Doe") + await john.save(db=db) + + renault = await Node.init(schema=MANUFACTURER_KIND_01, db=db) + await renault.new( + db=db, name="renault", description="Groupe Renault is a French multinational automobile manufacturer" + ) + await renault.save(db=db) + + megane = await Node.init(schema=CAR_KIND, db=db) + await megane.new( + db=db, name="Megane", description="Renault Megane", color="#c93420", manufacturer=renault, owner=john + ) + await megane.save(db=db) + + clio = await Node.init(schema=CAR_KIND, db=db) + await clio.new( + db=db, name="Clio", description="Renault Clio", color="#ff3420", manufacturer=renault, 
owner=john + ) + await clio.save(db=db) + + red = await Node.init(schema=TAG_KIND, db=db) + await red.new(db=db, name="red", persons=[john]) + await red.save(db=db) + + objs = { + "john": john.id, + "renault": renault.id, + "megane": megane.id, + "clio": clio.id, + "red": red.id, + } + + return objs + + @pytest.fixture(scope="class") + def schema_step01( + self, schema_car_base, schema_person_02_first_last, schema_manufacturer_base, schema_tag_base + ) -> dict[str, Any]: + return { + "version": "1.0", + "nodes": [schema_person_02_first_last, schema_car_base, schema_manufacturer_base, schema_tag_base], + } + + @pytest.fixture(scope="class") + def schema_step02( + self, schema_car_base, schema_person_03_no_height, schema_manufacturer_base, schema_tag_base + ) -> dict[str, Any]: + return { + "version": "1.0", + "nodes": [schema_person_03_no_height, schema_car_base, schema_manufacturer_base, schema_tag_base], + } + + @pytest.fixture(scope="class") + def schema_step03( + self, schema_car_base, schema_person_02_first_last, schema_manufacturer_base, schema_tag_base + ) -> dict[str, Any]: + return { + "version": "1.0", + "nodes": [ + schema_person_02_first_last, + schema_car_base, + schema_manufacturer_base, + schema_tag_base, + ], + } + + async def test_step01_baseline_backend(self, db: InfrahubDatabase, initial_dataset): + persons = await registry.manager.query(db=db, schema=PERSON_KIND) + assert len(persons) == 1 + + async def test_step02_check_attr_add_rename( + self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step02 + ): + success, response = await client.schema.check(schemas=[schema_step02]) + assert success + assert response == { + "diff": { + "added": {}, + "changed": { + "TestingPerson": { + "added": {}, + "changed": { + "attributes": { + "added": {}, + "changed": {}, + "removed": {"height": None}, + }, + }, + "removed": {}, + }, + }, + "removed": {}, + }, + } + + async def test_step02_load(self, db: InfrahubDatabase, client: 
InfrahubClient, initial_dataset, schema_step02): + response = await client.schema.load(schemas=[schema_step02]) + assert not response.errors + + # Ensure that we can query the nodes with the new schema in BRANCH1 + persons = await registry.manager.query( + db=db, + schema=PERSON_KIND, + filters={"firstname__value": "John"}, # , branch=self.branch1 + ) + assert len(persons) == 1 + john = persons[0] + assert john.firstname.value == "John" # type: ignore[attr-defined] + assert not hasattr(john, "height") + + async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): + success, response = await client.schema.check(schemas=[schema_step03]) + assert response == { + "diff": { + "added": {}, + "changed": { + "TestingPerson": { + "added": {}, + "changed": { + "attributes": {"added": {"height": None}, "changed": {}, "removed": {}}, + }, + "removed": {}, + }, + }, + "removed": {}, + }, + } + assert success + + async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): + response = await client.schema.load(schemas=[schema_step03]) + assert not response.errors + + # Modify the value for Height in the database + persons = await registry.manager.query( + db=db, + schema=PERSON_KIND, + filters={"firstname__value": "John"}, + ) + assert len(persons) == 1 + john = persons[0] + assert john.height.value is None + john.height.value = 200 + await john.save(db=db) + + # Validate that the new value has been properly saved + persons2 = await registry.manager.query( + db=db, + schema=PERSON_KIND, + filters={"firstname__value": "John"}, + ) + assert len(persons2) == 1 + john2 = persons2[0] + assert john2.height.value == 200 diff --git a/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py b/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py index 3031cbfda7..c9cf816c41 100644 --- 
a/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py +++ b/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py @@ -8,6 +8,7 @@ from infrahub.core.initialization import ( create_branch, ) +from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.database import InfrahubDatabase from infrahub.exceptions import InitializationError, SchemaNotFoundError @@ -278,6 +279,9 @@ async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): manufacturer_schema = registry.schema.get_node_schema(name=MANUFACTURER_KIND_01, branch=self.branch1) + person_schema = registry.schema.get_node_schema(name=PERSON_KIND, branch=self.branch1) + height_attr_schema = person_schema.get_attribute(name="height") + assert height_attr_schema.id # Insert the ID of the attribute name into the schema in order to rename it firstname assert schema_step03["nodes"][2]["name"] == "CarMaker" @@ -295,6 +299,11 @@ async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, i john = persons[0] assert not hasattr(john, "height") + updated_height_attr_schema = await registry.manager.get_one( + db=db, branch=self.branch1.name, id=height_attr_schema.id + ) + assert updated_height_attr_schema is None + manufacturers = await registry.manager.query( db=db, schema=MANUFACTURER_KIND_03, filters={"name__value": "renault"}, branch=self.branch1.name ) @@ -303,11 +312,18 @@ async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, i renault_cars = await renault.cars.get_peers(db=db) # type: ignore[attr-defined] assert len(renault_cars) == 2 - async def test_rebase(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset): + async def test_rebase(self, db: InfrahubDatabase, client: InfrahubClient, default_branch: Branch, initial_dataset): branch = await 
client.branch.rebase(branch_name=self.branch1.name) assert branch + person_schema = registry.schema.get_node_schema(name=PERSON_KIND, branch=default_branch) + height_attr_schema = person_schema.get_attribute(name="height") + assert height_attr_schema.id # Validate that all data added to main after the creation of the branch has been migrated properly + updated_height_attr_schema = await registry.manager.get_one( + db=db, branch=self.branch1.name, id=height_attr_schema.id + ) + assert updated_height_attr_schema is None persons = await registry.manager.query( db=db, schema=PERSON_KIND, filters={"firstname__value": "Jane"}, branch=self.branch1.name ) @@ -349,6 +365,29 @@ async def test_step04_load(self, db: InfrahubDatabase, client: InfrahubClient, i # FIXME after loading the new schema, TestingTag is still present in the branch, need to investigate # assert registry.schema.has(name=TAG_KIND, branch=self.branch1) is False + # check that tag attributes/relationships are deleted on branch + attr_schemas = await NodeManager.query( + db=db, branch=self.branch1, schema="SchemaAttribute", filters={"node__id": tag_schema.id} + ) + assert len(attr_schemas) == 0 + rel_schemas = await NodeManager.query( + db=db, branch=self.branch1, schema="SchemaRelationship", filters={"node__id": tag_schema.id} + ) + assert len(rel_schemas) == 0 + # check that tag attributes/relationships still exist on main + attr_schemas = await NodeManager.query(db=db, schema="SchemaAttribute", filters={"node__id": tag_schema.id}) + assert len(attr_schemas) == 1 + assert {a.name.value for a in attr_schemas} == {"name"} + rel_schemas = await NodeManager.query(db=db, schema="SchemaRelationship", filters={"node__id": tag_schema.id}) + assert len(rel_schemas) == 5 + assert {r.name.value for r in rel_schemas} == { + "cars", + "persons", + "profiles", + "subscriber_of_groups", + "member_of_groups", + } + tags = await registry.manager.query(db=db, schema=TAG_KIND) assert len(tags) == 2 diff --git 
a/backend/tests/integration/sdk/test_node_create_constraint.py b/backend/tests/integration/sdk/test_node_create_constraint.py index e04b0b9711..74d9a3a3bf 100644 --- a/backend/tests/integration/sdk/test_node_create_constraint.py +++ b/backend/tests/integration/sdk/test_node_create_constraint.py @@ -293,12 +293,12 @@ async def test_step_03_add_node_failure( async def test_create_repository_with_slash_failure(self, db: InfrahubDatabase, initial_dataset): repo = await Node.init(schema="CoreRepository", db=db) with pytest.raises( - ValidationError, match=re.escape("repo/name must be conform with the regex: '^[^/]*$' at name") + ValidationError, match=re.escape("repo/name must conform with the regex: '^[^/]*$' at name") ): await repo.new(db=db, name="repo/name", location="dummy") repo = await Node.init(schema="CoreReadOnlyRepository", db=db) with pytest.raises( - ValidationError, match=re.escape("repo/name must be conform with the regex: '^[^/]*$' at name") + ValidationError, match=re.escape("repo/name must conform with the regex: '^[^/]*$' at name") ): await repo.new(db=db, name="repo/name", location="dummy") diff --git a/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py b/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py index 95d98b9554..33d0e3d0b1 100644 --- a/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py +++ b/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py @@ -4,7 +4,7 @@ from copy import deepcopy from dataclasses import dataclass from functools import partial -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from uuid import uuid4 import httpx @@ -109,7 +109,7 @@ async def delete_virtual_host(self) -> None: response = await self._request(method="DELETE", url=f"{self.base_url}/vhosts/{self.settings.virtualhost}") assert response.status_code in {204, 404} - async def _request(self, method: str, url: str, payload: Optional[dict] = 
None) -> httpx.Response: + async def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response: params: dict[str, Any] = {} if payload: params["json"] = payload diff --git a/backend/tests/integration/user_workflows/test_user_worflow.py b/backend/tests/integration/user_workflows/test_user_worflow.py index 406f4b2be9..eca185e466 100644 --- a/backend/tests/integration/user_workflows/test_user_worflow.py +++ b/backend/tests/integration/user_workflows/test_user_worflow.py @@ -3,7 +3,6 @@ from deepdiff import DeepDiff from fastapi.testclient import TestClient -from infrahub.core.constants import NULL_VALUE from infrahub.database import InfrahubDatabase from infrahub.server import app from tests.test_data import dataset01 as ds01 @@ -58,6 +57,84 @@ } """ +DIFF_UPDATE = """ + mutation($branch: String!) { + DiffUpdate(data: {branch: $branch, wait_for_completion: true}) { + ok + } + } +""" + +DIFF_TREE_QUERY = """ +query GetDiffTree($branch: String){ + DiffTree (branch: $branch) { + base_branch + diff_branch + num_added + num_removed + num_updated + num_conflicts + nodes { + uuid + kind + label + status + parent { + uuid + kind + relationship_name + } + contains_conflict + num_added + num_removed + num_updated + num_conflicts + attributes { + name + status + num_added + num_removed + num_updated + num_conflicts + contains_conflict + conflict { uuid } + properties { + property_type + previous_value + new_value + previous_label + new_label + status + conflict { uuid } + } + } + relationships { + name + status + cardinality + contains_conflict + elements { + status + peer_id + contains_conflict + conflict { uuid } + properties { + property_type + previous_value + new_value + previous_label + new_label + status + conflict { uuid } + } + } + } + } + } +} +""" + + BRANCH_CREATE = """ mutation($branch: String!, $isolated: Boolean!) 
{ BranchCreate(data: { name: $branch, is_isolated: $isolated }) { @@ -123,6 +200,7 @@ { ok object { + id name { value } @@ -142,7 +220,7 @@ def __init__(self) -> None: class TestUserWorkflow01: @pytest.fixture(scope="class") - async def client(self, redis, nats): + async def client(self, redis, nats, prefect_test_fixture): client = TestClient(app) return client @@ -151,6 +229,7 @@ async def dataset01(self, db: InfrahubDatabase, init_db_infra): await ds01.load_data(db=db, nbr_devices=2) async def test_initialize_state(self): + state.data["spine1_id"] = None state.data["spine1_lo0_id"] = None state.data["time_start"] = None @@ -171,6 +250,9 @@ async def test_query_all_devices(self, client, init_db_infra, dataset01): assert "InfraDevice" in result.keys() assert len(result["InfraDevice"]["edges"]) == 2 + for device in result["InfraDevice"]["edges"]: + if device["node"]["name"]["value"] == "spine1": + state.data["spine1_id"] = device["node"]["id"] # Initialize the start time state.data["time_start"] = pendulum.now(tz="UTC") @@ -335,111 +417,97 @@ async def test_update_intf_description_main(self, client, init_db_infra, dataset assert intfs[0]["node"]["description"]["value"] == new_description - @pytest.mark.xfail(reason="Investigate as part of the reworking of the diff payload (issue #3265)") async def test_validate_diff_after_description_update(self, client, dataset01, integration_helper): headers = await integration_helper.admin_headers() with client: - response = client.get(f"/api/diff/data?branch={branch1}&branch_only=false", headers=headers) + response = client.post( + "/graphql", + json={"query": DIFF_UPDATE, "variables": {"branch": branch1}}, + headers=headers, + ) + assert response.status_code == 200 + result = response.json() + assert result.get("errors") is None + assert result["data"]["DiffUpdate"]["ok"] is True + + response = client.post( + "/graphql", + json={"query": DIFF_TREE_QUERY, "variables": {"branch": branch1}}, + headers=headers, + ) + assert 
response.status_code == 200 - assert response.status_code == 200 - assert "errors" not in response.json() - assert response.json() is not None result = response.json() + assert "errors" not in result + assert result - expected_result_branch1 = { - "action": {"branch1": "updated"}, - "display_label": {"branch1": "Loopback0"}, - "elements": { - "description": { - "change": { - "action": "updated", - "branches": ["branch1"], - "id": "17915618-03d7-7f70-4356-1851b7247682", - "properties": {}, - "summary": {"added": 0, "removed": 0, "updated": 1}, - "type": "Attribute", - "value": { - "changes": [ - { - "action": "updated", - "branch": "branch1", - "changed_at": "2023-10-25T11:26:48.387801Z", - "type": "HAS_VALUE", - "value": {"new": "New " "description " "in " "branch1", "previous": NULL_VALUE}, - } - ], - "path": "data/17915618-03d5-2db0-4358-185140cb1203/description/value", - }, + expected_result = { + "base_branch": "main", + "diff_branch": "branch1", + "num_added": 0, + "num_removed": 0, + "num_updated": 1, + "num_conflicts": 0, + "nodes": [ + { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "label": "spine1", + "status": "UNCHANGED", + "parent": None, + "contains_conflict": False, + "num_added": 0, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "attributes": [], + "relationships": [], + }, + { + "uuid": state.data["spine1_lo0_id"], + "kind": "InfraInterfaceL3", + "label": "Loopback0", + "status": "UPDATED", + "parent": { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "relationship_name": "interfaces", }, - "name": "description", - "path": "data/17915618-03d5-2db0-4358-185140cb1203/description", - "type": "Attribute", - } - }, - "id": "17915618-03d5-2db0-4358-185140cb1203", - "kind": "InfraInterfaceL3", - "path": "data/17915618-03d5-2db0-4358-185140cb1203", - "summary": {"added": 0, "removed": 0, "updated": 1}, - } - - expected_result_main = { - "action": {"main": "updated"}, - "display_label": {"main": "Ethernet1"}, - 
"elements": { - "description": { - "change": { - "action": "updated", - "branches": ["main"], - "id": "17915618-15e5-0ca0-435e-18516f4db7c8", - "properties": {}, - "summary": {"added": 0, "removed": 0, "updated": 1}, - "type": "Attribute", - "value": { - "changes": [ + "contains_conflict": False, + "num_added": 0, + "num_removed": 0, + "num_updated": 1, + "num_conflicts": 0, + "relationships": [], + "attributes": [ + { + "name": "description", + "status": "UPDATED", + "num_added": 1, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "contains_conflict": False, + "conflict": None, + "properties": [ { - "action": "updated", - "branch": "main", - "changed_at": "2023-10-25T11:26:49.190014Z", - "type": "HAS_VALUE", - "value": {"new": "New " "description " "in " "main", "previous": NULL_VALUE}, + "property_type": "HAS_VALUE", + "previous_value": "NULL", + "new_value": "New description in branch1", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, } ], - "path": "data/17915618-15e2-e1f0-435b-18517dcffdf5/description/value", - }, - }, - "name": "description", - "path": "data/17915618-15e2-e1f0-435b-18517dcffdf5/description", - "type": "Attribute", - } - }, - "id": "17915618-15e2-e1f0-435b-18517dcffdf5", - "kind": "InfraInterfaceL3", - "path": "data/17915618-15e2-e1f0-435b-18517dcffdf5", - "summary": {"added": 0, "removed": 0, "updated": 1}, + } + ], + }, + ], } - paths_to_exclude = [ - "root['id']", - "root['path']", - "root['elements']['description']['change']['id']", - "root['elements']['description']['change']['value']['changes'][0]['changed_at']", - "root['elements']['description']['change']['value']['path']", - "root['elements']['description']['path']", - ] - - assert ( - DeepDiff( - expected_result_branch1, result["diffs"][0], exclude_paths=paths_to_exclude, ignore_order=True - ).to_dict() - == {} - ) - assert ( - DeepDiff( - expected_result_main, result["diffs"][1], exclude_paths=paths_to_exclude, ignore_order=True 
- ).to_dict() - == {} - ) + assert DeepDiff(expected_result, result["data"]["DiffTree"], ignore_order=True).to_dict() == {} async def test_update_intf_description_branch1_again(self, client, dataset01, integration_helper): """ @@ -481,59 +549,97 @@ async def test_update_intf_description_branch1_again(self, client, dataset01, in assert intfs[0]["node"]["description"]["value"] == new_description - @pytest.mark.xfail(reason="FIXME: Need to investigate, Previous value is not correct") - def test_validate_diff_again_after_description_update(self, client, dataset01): + async def test_validate_diff_again_after_description_update(self, client, dataset01, integration_helper): + headers = await integration_helper.admin_headers() + with client: - time_from = state.data["time_after_intf_update_branch1"] - time_to = pendulum.now("UTC").to_iso8601_string() - response = client.get( - f"/api/diff/data?branch={branch1}&branch_only=true&time_from={time_from}&time_to={time_to}", + response = client.post( + "/graphql", + json={"query": DIFF_UPDATE, "variables": {"branch": branch1}}, + headers=headers, + ) + assert response.status_code == 200 + result = response.json() + assert result.get("errors") is None + assert result["data"]["DiffUpdate"]["ok"] is True + + response = client.post( + "/graphql", + json={"query": DIFF_TREE_QUERY, "variables": {"branch": branch1}}, headers=headers, ) + assert response.status_code == 200 - assert response.status_code == 200 - assert "errors" not in response.json() - assert response.json() is not None result = response.json() + assert "errors" not in result + assert result expected_result = { - "branch": "branch1", - "kind": "InterfaceL3", - "id": "8f3ed0a5-ed35-47bd-a76e-441f2d90c79a", - "summary": {"added": 0, "removed": 0, "updated": 1}, - "display_label": "Loopback0", - "changed_at": None, - "action": "updated", - "elements": { - "description": { - "type": "Attribute", - "name": "description", - "id": "fbbf4969-ef02-4428-a05f-bc3bee178f51", - 
"changed_at": None, - "summary": {"added": 0, "removed": 0, "updated": 0}, - "action": "updated", - "value": { - "branch": "branch1", - "type": "HAS_VALUE", - "changed_at": "2023-05-04T18:45:28.584932Z", - "action": "updated", - "value": {"new": "New New description in branch1", "previous": NULL_VALUE}, + "base_branch": "main", + "diff_branch": "branch1", + "num_added": 0, + "num_removed": 0, + "num_updated": 1, + "num_conflicts": 0, + "nodes": [ + { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "label": "spine1", + "status": "UNCHANGED", + "parent": None, + "contains_conflict": False, + "num_added": 0, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "attributes": [], + "relationships": [], + }, + { + "uuid": state.data["spine1_lo0_id"], + "kind": "InfraInterfaceL3", + "label": "Loopback0", + "status": "UPDATED", + "parent": { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "relationship_name": "interfaces", }, - "properties": [], - } - }, + "contains_conflict": False, + "num_added": 0, + "num_removed": 0, + "num_updated": 1, + "num_conflicts": 0, + "relationships": [], + "attributes": [ + { + "name": "description", + "status": "UPDATED", + "num_added": 1, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "contains_conflict": False, + "conflict": None, + "properties": [ + { + "property_type": "HAS_VALUE", + "previous_value": "NULL", + "new_value": "New New description in branch1", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + } + ], + } + ], + }, + ], } - paths_to_exclude = [ - "root['id']", - "root['elements']['description']['id']", - "root['elements']['description']['value']['changed_at']", - ] - - assert ( - DeepDiff(expected_result, result["branch1"][0], exclude_paths=paths_to_exclude, ignore_order=True).to_dict() - == {} - ) + assert DeepDiff(expected_result, result["data"]["DiffTree"], ignore_order=True).to_dict() == {} async def 
test_create_second_branch(self, client, init_db_infra, dataset01, integration_helper): headers = await integration_helper.admin_headers() @@ -731,21 +837,256 @@ async def test_add_new_interface_in_first_branch(self, client, dataset01, integr result = response.json()["data"] assert result["InfraInterfaceL3Create"]["ok"] assert result["InfraInterfaceL3Create"]["object"]["name"]["value"] == "Ethernet8" + state.data["spine1_ethernet8_id"] = result["InfraInterfaceL3Create"]["object"]["id"] + + async def test_validate_diff_after_new_interface(self, client, dataset01, integration_helper): + headers = await integration_helper.admin_headers() - @pytest.mark.xfail(reason="FIXME: Need to refactor once we have the new diff API") - def test_validate_diff_after_new_interface(self, client, dataset01): with client: - response = client.get(f"/api/diff/data?branch={branch1}&branch_only=true", headers=headers) + response = client.post( + "/graphql", + json={"query": DIFF_UPDATE, "variables": {"branch": branch1}}, + headers=headers, + ) + assert response.status_code == 200 + result = response.json() + assert result.get("errors") is None + assert result["data"]["DiffUpdate"]["ok"] is True - assert response.status_code == 200 - assert "errors" not in response.json() - assert response.json() is not None - # result = response.json() + response = client.post( + "/graphql", + json={"query": DIFF_TREE_QUERY, "variables": {"branch": branch1}}, + headers=headers, + ) + assert response.status_code == 200 - # assert DeepDiff(result["diff"]["nodes"], expected_result_nodes, ignore_order=True).to_dict() == {} - # assert ( - # DeepDiff(result["diff"]["relationships"], expected_result_relationships, ignore_order=True).to_dict() == {} - # ) + result = response.json() + assert "errors" not in result + + assert result + diff_tree = result["data"]["DiffTree"] + assert diff_tree["base_branch"] == "main" + assert diff_tree["diff_branch"] == "branch1" + assert diff_tree["num_added"] == 1 + assert 
diff_tree["num_removed"] == 0 + assert diff_tree["num_updated"] == 2 + assert diff_tree["num_conflicts"] == 0 + node_diffs_by_uuid = {n["uuid"]: n for n in diff_tree["nodes"]} + assert set(node_diffs_by_uuid.keys()) == { + state.data["spine1_lo0_id"], + state.data["spine1_id"], + state.data["spine1_ethernet8_id"], + } + + expected_loopback_0 = { + "uuid": state.data["spine1_lo0_id"], + "kind": "InfraInterfaceL3", + "label": "Loopback0", + "status": "UPDATED", + "parent": { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "relationship_name": "interfaces", + }, + "contains_conflict": False, + "num_added": 0, + "num_removed": 0, + "num_updated": 1, + "num_conflicts": 0, + "attributes": [ + { + "name": "description", + "status": "UPDATED", + "num_added": 1, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "contains_conflict": False, + "conflict": None, + "properties": [ + { + "property_type": "HAS_VALUE", + "previous_value": "NULL", + "new_value": "New New description in branch1", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + } + ], + } + ], + "relationships": [], + } + assert node_diffs_by_uuid[state.data["spine1_lo0_id"]] == expected_loopback_0 + + expected_spine = { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "label": "spine1", + "status": "UPDATED", + "parent": None, + "contains_conflict": False, + "num_added": 0, + "num_removed": 0, + "num_updated": 1, + "num_conflicts": 0, + "attributes": [], + "relationships": [ + { + "name": "interfaces", + "status": "UPDATED", + "cardinality": "MANY", + "contains_conflict": False, + "elements": [ + { + "status": "ADDED", + "peer_id": state.data["spine1_ethernet8_id"], + "contains_conflict": False, + "conflict": None, + "properties": [ + { + "property_type": "IS_RELATED", + "previous_value": None, + "new_value": state.data["spine1_ethernet8_id"], + "previous_label": None, + "new_label": "Ethernet8", + "status": "ADDED", + "conflict": 
None, + }, + { + "property_type": "IS_PROTECTED", + "previous_value": None, + "new_value": "False", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + }, + { + "property_type": "IS_VISIBLE", + "previous_value": None, + "new_value": "True", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + }, + ], + } + ], + } + ], + } + assert DeepDiff(expected_spine, node_diffs_by_uuid[state.data["spine1_id"]], ignore_order=True).to_dict() == {} + + expected_new_attributes = { + "mtu": "1500", + "description": "New interface added in Branch1", + "lacp_priority": "32768", + "enabled": "True", + "name": "Ethernet8", + "role": "leaf", + "speed": "1000", + "status": "active", + "lacp_rate": "Normal", + } + expected_new_interface = { + "uuid": state.data["spine1_ethernet8_id"], + "kind": "InfraInterfaceL3", + "label": "Ethernet8", + "status": "ADDED", + "parent": { + "uuid": state.data["spine1_id"], + "kind": "InfraDevice", + "relationship_name": "interfaces", + }, + "contains_conflict": False, + "num_added": 10, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "attributes": [ + { + "name": name, + "status": "ADDED", + "num_added": 3, + "num_removed": 0, + "num_updated": 0, + "num_conflicts": 0, + "contains_conflict": False, + "conflict": None, + "properties": [ + { + "property_type": "HAS_VALUE", + "previous_value": None, + "new_value": new_value, + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + }, + { + "property_type": "IS_PROTECTED", + "previous_value": None, + "new_value": "False", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + }, + { + "property_type": "IS_VISIBLE", + "previous_value": None, + "new_value": "True", + "previous_label": None, + "new_label": None, + "status": "ADDED", + "conflict": None, + }, + ], + } + for name, new_value in expected_new_attributes.items() + ], + "relationships": [ + { + 
"name": "device", + "status": "ADDED", + "cardinality": "ONE", + "contains_conflict": False, + "elements": [ + { + "status": "ADDED", + "peer_id": state.data["spine1_id"], + "contains_conflict": False, + "conflict": None, + "properties": [ + { + "property_type": property_type, + "previous_value": None, + "new_value": new_value, + "previous_label": None, + "new_label": new_label, + "status": "ADDED", + "conflict": None, + } + for property_type, new_value, new_label in [ + ("IS_RELATED", state.data["spine1_id"], "spine1"), + ("IS_PROTECTED", "False", None), + ("IS_VISIBLE", "True", None), + ] + ], + } + ], + } + ], + } + assert ( + DeepDiff( + expected_new_interface, node_diffs_by_uuid[state.data["spine1_ethernet8_id"]], ignore_order=True + ).to_dict() + == {} + ) async def test_merge_first_branch_into_main(self, client, dataset01, integration_helper): # Expected description for Loopback0 after the merge diff --git a/backend/tests/unit/api/test_05_query_api.py b/backend/tests/unit/api/test_05_query_api.py index 700ab04df8..1699566382 100644 --- a/backend/tests/unit/api/test_05_query_api.py +++ b/backend/tests/unit/api/test_05_query_api.py @@ -17,22 +17,24 @@ @pytest.fixture async def base_authentication( - db: InfrahubDatabase, - default_branch: Branch, - create_test_admin, - register_core_models_schema, + db: InfrahubDatabase, default_branch: Branch, create_test_admin, register_core_models_schema ) -> None: pass async def test_query_endpoint_group_no_params( - db: InfrahubDatabase, client: TestClient, client_headers, default_branch, car_person_data, patch_services + db: InfrahubDatabase, + client: TestClient, + admin_headers, + create_test_admin, + default_branch, + car_person_data, + patch_services, ): # Must execute in a with block to execute the startup/shutdown events with client: response = client.get( - "/api/query/query01?update_group=true&subscribers=AAAAAA&subscribers=BBBBBB", - headers=client_headers, + 
"/api/query/query01?update_group=true&subscribers=AAAAAA&subscribers=BBBBBB", headers=admin_headers ) assert "errors" not in response.json() @@ -66,14 +68,11 @@ async def test_query_endpoint_group_no_params( async def test_query_endpoint_group_params( - db: InfrahubDatabase, client: TestClient, client_headers, default_branch, car_person_data + db: InfrahubDatabase, client: TestClient, admin_headers, default_branch, create_test_admin, car_person_data ): # Must execute in a with block to execute the startup/shutdown events with client: - response = client.get( - "/api/query/query02?update_group=true&person=John", - headers=client_headers, - ) + response = client.get("/api/query/query02?update_group=true&person=John", headers=admin_headers) assert "errors" not in response.json() assert response.status_code == 200 @@ -100,14 +99,11 @@ async def test_query_endpoint_group_params( async def test_query_endpoint_get_default_branch( - db: InfrahubDatabase, client: TestClient, client_headers, default_branch, car_person_data + db: InfrahubDatabase, client: TestClient, admin_headers, default_branch, create_test_admin, car_person_data ): # Must execute in a with block to execute the startup/shutdown events with client: - response = client.get( - "/api/query/query01", - headers=client_headers, - ) + response = client.get("/api/query/query01", headers=admin_headers) assert "errors" not in response.json() assert response.status_code == 200 @@ -168,16 +164,19 @@ async def test_query_endpoint_post_with_params( async def test_query_endpoint_branch1( - db: InfrahubDatabase, client: TestClient, client_headers, default_branch, car_person_data, authentication_base + db: InfrahubDatabase, + client: TestClient, + admin_headers, + default_branch, + create_test_admin, + car_person_data, + authentication_base, ): await create_branch(branch_name="branch1", db=db) # Must execute in a with block to execute the startup/shutdown events with client: - response = client.get( - 
"/api/query/query01?branch=branch1", - headers=client_headers, - ) + response = client.get("/api/query/query01?branch=branch1", headers=admin_headers) assert "errors" not in response.json() assert response.status_code == 200 diff --git a/backend/tests/unit/api/test_11_artifact.py b/backend/tests/unit/api/test_11_artifact.py index b982c5dc53..ec133c8970 100644 --- a/backend/tests/unit/api/test_11_artifact.py +++ b/backend/tests/unit/api/test_11_artifact.py @@ -1,119 +1,140 @@ +from unittest.mock import call, patch + +from starlette.testclient import TestClient + from infrahub.core import registry from infrahub.core.constants import InfrahubKind from infrahub.core.node import Node from infrahub.database import InfrahubDatabase -from infrahub.message_bus import messages - - -async def test_artifact_definition_endpoint( - db: InfrahubDatabase, - client, - admin_headers, - default_branch, - rpc_bus, - register_core_models_schema, - register_builtin_models_schema, - car_person_data_generic, - authentication_base, -): - g1 = await Node.init(db=db, schema=InfrahubKind.STANDARDGROUP) - await g1.new(db=db, name="group1", members=[car_person_data_generic["c1"], car_person_data_generic["c2"]]) - await g1.save(db=db) - - t1 = await Node.init(db=db, schema="CoreTransformPython") - await t1.new( - db=db, - name="transform01", - query=str(car_person_data_generic["q1"].id), - repository=str(car_person_data_generic["r1"].id), - file_path="transform01.py", - class_name="Transform01", - ) - await t1.save(db=db) - - ad1 = await Node.init(db=db, schema=InfrahubKind.ARTIFACTDEFINITION) - await ad1.new( - db=db, - name="artifactdef01", - targets=g1, - transformation=t1, - content_type="application/json", - artifact_name="myartifact", - parameters={"value": {"name": "name__value"}}, - ) - await ad1.save(db=db) - - # Must execute in a with block to execute the startup/shutdown events - with client: - response = client.post( - f"/api/artifact/generate/{ad1.id}", - headers=admin_headers, 
+from infrahub.git.models import RequestArtifactDefinitionGenerate +from infrahub.server import app +from infrahub.workflows.catalogue import REQUEST_ARTIFACT_DEFINITION_GENERATE +from tests.helpers.test_app import TestInfrahubApp + + +class TestArtifact11(TestInfrahubApp): + async def test_artifact_definition_endpoint( + self, + db: InfrahubDatabase, + admin_headers, + default_branch, + register_core_models_schema, + register_builtin_models_schema, + car_person_data_generic, + authentication_base, + client, + ): + g1 = await Node.init(db=db, schema=InfrahubKind.STANDARDGROUP) + await g1.new(db=db, name="group1", members=[car_person_data_generic["c1"], car_person_data_generic["c2"]]) + await g1.save(db=db) + + t1 = await Node.init(db=db, schema="CoreTransformPython") + await t1.new( + db=db, + name="transform01", + query=str(car_person_data_generic["q1"].id), + repository=str(car_person_data_generic["r1"].id), + file_path="transform01.py", + class_name="Transform01", ) + await t1.save(db=db) + + ad1 = await Node.init(db=db, schema=InfrahubKind.ARTIFACTDEFINITION) + await ad1.new( + db=db, + name="artifactdef01", + targets=g1, + transformation=t1, + content_type="application/json", + artifact_name="myartifact", + parameters={"value": {"name": "name__value"}}, + ) + await ad1.save(db=db) + + app_client = TestClient(app) + + # Must execute in a with block to execute the startup/shutdown events + with ( + app_client, + patch( + "infrahub.services.adapters.workflow.local.WorkflowLocalExecution.submit_workflow" + ) as mock_submit_workflow, + ): + response = app_client.post( + f"/api/artifact/generate/{ad1.id}", + headers=admin_headers, + ) + + assert response.status_code == 200 + expected_calls = [ + call( + workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, + parameters={ + "model": RequestArtifactDefinitionGenerate(artifact_definition=ad1.id, branch="main", limit=[]) + }, + ), + ] + mock_submit_workflow.assert_has_calls(expected_calls) + + async def test_artifact_endpoint( 
+ self, + db: InfrahubDatabase, + admin_headers, + register_core_models_schema, + register_builtin_models_schema, + car_person_data_generic, + authentication_base, + ): + app_client = TestClient(app) + + with app_client: + response = app_client.get("/api/artifact/95008984-16ca-4e58-8323-0899bb60035f", headers=admin_headers) + assert response.status_code == 404 + + g1 = await Node.init(db=db, schema=InfrahubKind.STANDARDGROUP) + await g1.new(db=db, name="group1", members=[car_person_data_generic["c1"], car_person_data_generic["c2"]]) + await g1.save(db=db) + + t1 = await Node.init(db=db, schema="CoreTransformPython") + await t1.new( + db=db, + name="transform01", + query=str(car_person_data_generic["q1"].id), + repository=str(car_person_data_generic["r1"].id), + file_path="transform01.py", + class_name="Transform01", + ) + await t1.save(db=db) + + ad1 = await Node.init(db=db, schema=InfrahubKind.ARTIFACTDEFINITION) + await ad1.new( + db=db, + name="artifactdef01", + targets=g1, + transformation=t1, + content_type="application/json", + artifact_name="myartifact", + parameters={"value": {"name": "name__value"}}, + ) + await ad1.save(db=db) + + art1 = await Node.init(db=db, schema=InfrahubKind.ARTIFACT) + await art1.new( + db=db, + name="myyartifact", + definition=ad1, + status="Ready", + object=car_person_data_generic["c1"], + storage_id="95008984-16ca-4e58-8323-0899bb60035f", + checksum="60d39063c26263353de24e1b913e1e1c", + content_type="application/json", + ) + await art1.save(db=db) + + registry.storage.store(identifier="95008984-16ca-4e58-8323-0899bb60035f", content='{"test": true}'.encode()) + + with app_client: + response = app_client.get(f"/api/artifact/{art1.id}", headers=admin_headers) - assert response.status_code == 200 - assert ( - messages.RequestArtifactDefinitionGenerate(artifact_definition=ad1.id, branch="main", limit=[]) - in rpc_bus.messages - ) - - -async def test_artifact_endpoint( - db: InfrahubDatabase, - client, - admin_headers, - 
register_core_models_schema, - register_builtin_models_schema, - car_person_data_generic, - authentication_base, -): - with client: - response = client.get("/api/artifact/95008984-16ca-4e58-8323-0899bb60035f", headers=admin_headers) - assert response.status_code == 404 - - g1 = await Node.init(db=db, schema=InfrahubKind.STANDARDGROUP) - await g1.new(db=db, name="group1", members=[car_person_data_generic["c1"], car_person_data_generic["c2"]]) - await g1.save(db=db) - - t1 = await Node.init(db=db, schema="CoreTransformPython") - await t1.new( - db=db, - name="transform01", - query=str(car_person_data_generic["q1"].id), - repository=str(car_person_data_generic["r1"].id), - file_path="transform01.py", - class_name="Transform01", - ) - await t1.save(db=db) - - ad1 = await Node.init(db=db, schema=InfrahubKind.ARTIFACTDEFINITION) - await ad1.new( - db=db, - name="artifactdef01", - targets=g1, - transformation=t1, - content_type="application/json", - artifact_name="myartifact", - parameters={"value": {"name": "name__value"}}, - ) - await ad1.save(db=db) - - art1 = await Node.init(db=db, schema=InfrahubKind.ARTIFACT) - await art1.new( - db=db, - name="myyartifact", - definition=ad1, - status="Ready", - object=car_person_data_generic["c1"], - storage_id="95008984-16ca-4e58-8323-0899bb60035f", - checksum="60d39063c26263353de24e1b913e1e1c", - content_type="application/json", - ) - await art1.save(db=db) - - registry.storage.store(identifier="95008984-16ca-4e58-8323-0899bb60035f", content='{"test": true}'.encode()) - - with client: - response = client.get(f"/api/artifact/{art1.id}", headers=admin_headers) - - assert response.status_code == 200 - assert response.json() == {"test": True} + assert response.status_code == 200 + assert response.json() == {"test": True} diff --git a/backend/tests/unit/api/test_20_graphql_api.py b/backend/tests/unit/api/test_20_graphql_api.py index 2239ed6f93..e5813b2b6b 100644 --- a/backend/tests/unit/api/test_20_graphql_api.py +++ 
b/backend/tests/unit/api/test_20_graphql_api.py @@ -7,7 +7,9 @@ from infrahub.database import InfrahubDatabase -async def test_graphql_endpoint(db: InfrahubDatabase, client, client_headers, default_branch: Branch, car_person_data): +async def test_graphql_endpoint( + db: InfrahubDatabase, client, admin_headers, default_branch: Branch, create_test_admin, car_person_data +): query = """ query { TestPerson { @@ -33,11 +35,7 @@ async def test_graphql_endpoint(db: InfrahubDatabase, client, client_headers, de # Must execute in a with block to execute the startup/shutdown events with client: - response = client.post( - "/graphql", - json={"query": query}, - headers=client_headers, - ) + response = client.post("/graphql", json={"query": query}, headers=admin_headers) assert response.status_code == 200 assert "errors" not in response.json() @@ -52,7 +50,7 @@ async def test_graphql_endpoint(db: InfrahubDatabase, client, client_headers, de async def test_graphql_endpoint_with_timestamp( - db: InfrahubDatabase, client, client_headers, default_branch: Branch, car_person_data + db: InfrahubDatabase, client, admin_headers, default_branch: Branch, create_test_admin, car_person_data ): time_before = Timestamp() @@ -76,11 +74,7 @@ async def test_graphql_endpoint_with_timestamp( # Must execute in a with block to execute the startup/shutdown events with client: - response = client.post( - "/graphql", - json={"query": query}, - headers=client_headers, - ) + response = client.post("/graphql", json={"query": query}, headers=admin_headers) assert response.status_code == 200 assert "errors" not in response.json() @@ -92,11 +86,7 @@ async def test_graphql_endpoint_with_timestamp( assert sorted(names) == ["Jane", "Johnny"] with client: - response = client.post( - f"/graphql?at={time_before.to_string()}", - json={"query": query}, - headers=client_headers, - ) + response = client.post(f"/graphql?at={time_before.to_string()}", json={"query": query}, headers=admin_headers) assert 
response.status_code == 200 assert "errors" not in response.json() diff --git a/backend/tests/unit/api/test_menu.py b/backend/tests/unit/api/test_menu.py index 17157946b9..acdf53fa31 100644 --- a/backend/tests/unit/api/test_menu.py +++ b/backend/tests/unit/api/test_menu.py @@ -4,7 +4,7 @@ from infrahub.database import InfrahubDatabase -async def test_get_menu( +async def test_get_menu_not_admin( db: InfrahubDatabase, client, client_headers, @@ -22,3 +22,30 @@ async def test_get_menu( assert response.status_code == 200 assert response.json() is not None + data = response.json() + internal_menu_items = [item["identifier"] for item in data["sections"]["internal"]] + assert "BuiltinAdmin" not in internal_menu_items + + +async def test_get_menu_admin( + db: InfrahubDatabase, + client, + admin_headers, + authentication_base, + default_branch: Branch, + car_person_schema_generics: SchemaRoot, + car_person_data_generic, +): + await create_default_menu(db=db) + + with client: + response = client.get( + "/api/menu", + headers=admin_headers, + ) + + assert response.status_code == 200 + assert response.json() is not None + data = response.json() + internal_menu_items = [item["identifier"] for item in data["sections"]["internal"]] + assert "BuiltinAdmin" in internal_menu_items diff --git a/backend/tests/unit/conftest.py b/backend/tests/unit/conftest.py index 9b84684649..c095db2100 100644 --- a/backend/tests/unit/conftest.py +++ b/backend/tests/unit/conftest.py @@ -55,7 +55,8 @@ from infrahub.database import InfrahubDatabase from infrahub.dependencies.registry import build_component_registry from infrahub.git import InfrahubRepository -from infrahub.utils import format_label +from infrahub.services import InfrahubServices, services +from infrahub.services.adapters.workflow.local import WorkflowLocalExecution from tests.helpers.file_repo import FileRepo from tests.helpers.test_client import dummy_async_request from tests.test_data import dataset01 as ds01 @@ -2541,10 +2542,7 @@ 
async def create_test_admin(db: InfrahubDatabase, register_core_models_schema, d permissions: list[Node] = [] global_permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await global_permission.new( - db=db, - name=format_label(GlobalPermissions.SUPER_ADMIN.value), - action=GlobalPermissions.SUPER_ADMIN.value, - decision=PermissionDecision.ALLOW_ALL.value, + db=db, action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value ) await global_permission.save(db=db) permissions.append(global_permission) @@ -2624,6 +2622,14 @@ async def second_account(db: InfrahubDatabase, data_schema, node_group_schema, r return obj +@pytest.fixture +async def session_second_account(db: InfrahubDatabase, second_account) -> AccountSession: + session = AccountSession( + authenticated=True, auth_type=AuthType.API, account_id=second_account.id, role="read-write" + ) + return session + + @pytest.fixture async def repos_in_main(db: InfrahubDatabase, register_core_models_schema): repo01 = await Node.init(db=db, schema=InfrahubKind.REPOSITORY) @@ -2923,3 +2929,23 @@ async def prefix_pool_01( ip_dataset_prefix_v4["prefix_pool"] = prefix_pool return ip_dataset_prefix_v4 + + +@pytest.fixture() +def workflow_local(): + original = config.OVERRIDE.workflow + workflow = WorkflowLocalExecution() + config.OVERRIDE.workflow = workflow + yield workflow + config.OVERRIDE.workflow = original + + +@pytest.fixture +def init_service(db: InfrahubDatabase): + original = services.service + database = db + workflow = WorkflowLocalExecution() + service = InfrahubServices(database=database, workflow=workflow) + services.service = service + yield service + services.service = original diff --git a/backend/tests/unit/core/constraint_validators/test_determiner.py b/backend/tests/unit/core/constraint_validators/test_determiner.py index 3156ae189d..9cccca7bac 100644 --- a/backend/tests/unit/core/constraint_validators/test_determiner.py +++ 
b/backend/tests/unit/core/constraint_validators/test_determiner.py @@ -1,10 +1,9 @@ import pytest -from infrahub_sdk.diff import NodeDiff from infrahub.core import registry from infrahub.core.branch import Branch -from infrahub.core.constants import DiffAction, SchemaPathType -from infrahub.core.diff.model.diff import DiffElementType +from infrahub.core.constants import SchemaPathType +from infrahub.core.diff.model.path import NodeDiffFieldSummary from infrahub.core.models import SchemaUpdateConstraintInfo from infrahub.core.node import Node from infrahub.core.path import SchemaPath @@ -14,22 +13,8 @@ @pytest.fixture def person_name_node_diff( person_john_main: Node, default_branch: Branch -) -> tuple[NodeDiff, set[SchemaUpdateConstraintInfo]]: - node_diff = { - "branch": default_branch.name, - "kind": "TestPerson", - "id": person_john_main.id, - "action": DiffAction.UPDATED.value, - "display_label": "Person John Main Display Label", - "elements": [ - { - "name": "name", - "element_type": DiffElementType.ATTRIBUTE.value, - "action": DiffAction.UPDATED.value, - "summary": {"added": 0, "updated": 1, "removed": 0}, - } - ], - } +) -> tuple[NodeDiffFieldSummary, set[SchemaUpdateConstraintInfo]]: + node_diff = NodeDiffFieldSummary(kind="TestPerson", attribute_names={"name"}) schema_updated_constraint_infos = { SchemaUpdateConstraintInfo( path=SchemaPath( @@ -84,26 +69,8 @@ def person_name_node_diff( @pytest.fixture def person_cars_node_diff( person_john_main: Node, default_branch: Branch -) -> tuple[NodeDiff, set[SchemaUpdateConstraintInfo]]: - node_diff = { - "branch": default_branch.name, - "kind": "TestPerson", - "id": person_john_main.id, - "action": DiffAction.UPDATED.value, - "display_label": "Person John Main Display Label", - "elements": [ - { - "name": "cars", - "element_type": DiffElementType.RELATIONSHIP_MANY.value, - "action": DiffAction.UPDATED.value, - "summary": {"added": 0, "updated": 1, "removed": 0}, - "peers": [ - {"action": DiffAction.REMOVED.value, 
"summary": {"added": 0, "updated": 0, "removed": 1}}, - {"action": DiffAction.ADDED.value, "summary": {"added": 1, "updated": 0, "removed": 0}}, - ], - } - ], - } +) -> tuple[NodeDiffFieldSummary, set[SchemaUpdateConstraintInfo]]: + node_diff = NodeDiffFieldSummary(kind="TestPerson", relationship_names={"cars"}) schema_updated_constraint_infos = { SchemaUpdateConstraintInfo( constraint_name="relationship.min_count.update", diff --git a/backend/tests/unit/core/constraint_validators/test_relationship_count.py b/backend/tests/unit/core/constraint_validators/test_relationship_count.py index ed92a51f3f..bcbf362255 100644 --- a/backend/tests/unit/core/constraint_validators/test_relationship_count.py +++ b/backend/tests/unit/core/constraint_validators/test_relationship_count.py @@ -48,7 +48,7 @@ async def test_query_failure_cardinality_one( ): person_schema = registry.schema.get(name="TestPerson") cars_rel = person_schema.get_relationship(name="cars") - cars_rel.cardinality = RelationshipCardinality.ONE + cars_rel.max_count = 1 schema_path = SchemaPath(path_type=SchemaPathType.RELATIONSHIP, schema_kind="TestPerson", field_name="cars") query = await RelationshipCountUpdateValidatorQuery.init( @@ -351,6 +351,88 @@ async def test_query_delete_on_branch_success( assert len(all_paths) == 0 +async def test_hierarchical_success(db: InfrahubDatabase, default_branch: Branch, hierarchical_location_data_simple): + site_schema = registry.schema.get(name="LocationSite", duplicate=False) + + schema_path = SchemaPath(path_type=SchemaPathType.RELATIONSHIP, schema_kind="LocationSite", field_name="parent") + query = await RelationshipCountUpdateValidatorQuery.init( + db=db, branch=default_branch, node_schema=site_schema, schema_path=schema_path + ) + + await query.execute(db=db) + + grouped_paths = await query.get_paths() + all_paths = grouped_paths.get_all_data_paths() + assert len(all_paths) == 0 + + +async def test_hierarchical_failure(db: InfrahubDatabase, default_branch: Branch, 
hierarchical_location_data_simple): + paris_site = hierarchical_location_data_simple["paris"] + branch = await create_branch(branch_name=str("branch2"), db=db) + schema_path = SchemaPath(path_type=SchemaPathType.RELATIONSHIP, schema_kind="LocationSite", field_name="children") + site_schema = registry.schema.get(name="LocationSite", branch=branch, duplicate=False) + + # check no violations to start with + query = await RelationshipCountUpdateValidatorQuery.init( + db=db, branch=branch, node_schema=site_schema, schema_path=schema_path + ) + + await query.execute(db=db) + grouped_paths = await query.get_paths() + all_paths = grouped_paths.get_all_data_paths() + assert len(all_paths) == 0 + + # add a violation + branch_rack = await NodeManager.get_one(db=db, branch=branch, id=paris_site.id) + extra_rack = await Node.init(db=db, branch=branch, schema="LocationRack") + await extra_rack.new(db=db, name="extra_rack", parent=branch_rack, status="online") + await extra_rack.save(db=db) + child_rel = site_schema.get_relationship(name="children") + child_rel.max_count = 2 + + query = await RelationshipCountUpdateValidatorQuery.init( + db=db, branch=branch, node_schema=site_schema, schema_path=schema_path + ) + await query.execute(db=db) + grouped_paths = await query.get_paths() + all_paths = grouped_paths.get_all_data_paths() + assert len(all_paths) == 2 + assert ( + DataPath( + branch=branch.name, + path_type=PathType.NODE, + node_id=paris_site.id, + kind="LocationSite", + field_name="children", + value=1, + ) + in all_paths + ) + assert ( + DataPath( + branch=default_branch.name, + path_type=PathType.NODE, + node_id=paris_site.id, + kind="LocationSite", + field_name="children", + value=2, + ) + in all_paths + ) + + # remove violation + branch_rack = await NodeManager.get_one(db=db, branch=branch, id=extra_rack.id) + await branch_rack.delete(db=db) + + query = await RelationshipCountUpdateValidatorQuery.init( + db=db, branch=branch, node_schema=site_schema, 
schema_path=schema_path + ) + await query.execute(db=db) + grouped_paths = await query.get_paths() + all_paths = grouped_paths.get_all_data_paths() + assert len(all_paths) == 0 + + async def test_validator( db: InfrahubDatabase, branch: Branch, diff --git a/backend/tests/unit/core/constraint_validators/test_relationship_peer_update.py b/backend/tests/unit/core/constraint_validators/test_relationship_peer_update.py index 33959afb4d..1dc294b7dd 100644 --- a/backend/tests/unit/core/constraint_validators/test_relationship_peer_update.py +++ b/backend/tests/unit/core/constraint_validators/test_relationship_peer_update.py @@ -316,9 +316,6 @@ async def test_query_update_on_branch_success( g_car = await Node.init(db=db, schema="TestGazCar", branch=default_branch) await g_car.new(db=db, name="GCar", nbr_seats=3, mpg=29, owner=p_1) await g_car.save(db=db) - await p_1.cars.get_relationships(db=db) - await p_1.cars.update(db=db, data=[*p_1.cars, g_car]) - await p_1.save(db=db) await branch.rebase(db=db) p_1 = await NodeManager.get_one(db=db, id=p_1.id, branch=branch) diff --git a/backend/tests/unit/core/diff/test_coordinator.py b/backend/tests/unit/core/diff/test_coordinator.py index c23b40c08d..1e73123792 100644 --- a/backend/tests/unit/core/diff/test_coordinator.py +++ b/backend/tests/unit/core/diff/test_coordinator.py @@ -1,10 +1,7 @@ -from unittest.mock import AsyncMock - from infrahub.core.branch import Branch from infrahub.core.constants import DiffAction from infrahub.core.constants.database import DatabaseEdgeType from infrahub.core.diff.coordinator import DiffCoordinator -from infrahub.core.diff.data_check_synchronizer import DiffDataCheckSynchronizer from infrahub.core.initialization import create_branch from infrahub.core.manager import NodeManager from infrahub.core.node import Node @@ -24,8 +21,6 @@ async def test_node_deleted_after_branching( component_registry = get_component_registry() diff_coordinator = await component_registry.get_component(DiffCoordinator, 
db=db, branch=branch) - mock_synchronizer = AsyncMock(spec=DiffDataCheckSynchronizer) - diff_coordinator.data_check_synchronizer = mock_synchronizer diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch) assert diff.base_branch_name == default_branch.name diff --git a/backend/tests/unit/core/diff/test_coordinator_lock.py b/backend/tests/unit/core/diff/test_coordinator_lock.py index 7ebdea57d3..21aeb4ad1c 100644 --- a/backend/tests/unit/core/diff/test_coordinator_lock.py +++ b/backend/tests/unit/core/diff/test_coordinator_lock.py @@ -7,7 +7,6 @@ from infrahub import config, lock from infrahub.core.branch import Branch from infrahub.core.diff.coordinator import DiffCoordinator -from infrahub.core.diff.data_check_synchronizer import DiffDataCheckSynchronizer from infrahub.core.initialization import create_branch from infrahub.core.node import Node from infrahub.core.timestamp import Timestamp @@ -35,8 +34,6 @@ async def get_diff_coordinator(self, db: InfrahubDatabase, diff_branch: Branch) config.SETTINGS.database.max_depth_search_hierarchy = 10 component_registry = get_component_registry() diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=diff_branch) - mock_synchronizer = AsyncMock(spec=DiffDataCheckSynchronizer) - diff_coordinator.data_check_synchronizer = mock_synchronizer wrapped_repo = AsyncMock(wraps=diff_coordinator.diff_repo) diff_coordinator.diff_repo = wrapped_repo wrapped_calculator = AsyncMock(wraps=diff_coordinator.diff_calculator) diff --git a/backend/tests/unit/core/diff/test_diff_and_merge.py b/backend/tests/unit/core/diff/test_diff_and_merge.py new file mode 100644 index 0000000000..b712ab8ded --- /dev/null +++ b/backend/tests/unit/core/diff/test_diff_and_merge.py @@ -0,0 +1,247 @@ +from unittest.mock import AsyncMock + +import pytest + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.diff.coordinator import DiffCoordinator 
+from infrahub.core.diff.data_check_synchronizer import DiffDataCheckSynchronizer +from infrahub.core.diff.merger.merger import DiffMerger +from infrahub.core.diff.model.path import ConflictSelection +from infrahub.core.diff.repository.repository import DiffRepository +from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.schema.attribute_schema import AttributeSchema +from infrahub.core.schema.node_schema import NodeSchema +from infrahub.core.schema.schema_branch import SchemaBranch +from infrahub.core.timestamp import Timestamp +from infrahub.database import InfrahubDatabase +from infrahub.dependencies.registry import get_component_registry + + +class TestDiffAndMerge: + @pytest.fixture + async def diff_repository(self, db: InfrahubDatabase, default_branch: Branch) -> DiffRepository: + component_registry = get_component_registry() + return await component_registry.get_component(DiffRepository, db=db, branch=default_branch) + + async def _get_diff_coordinator(self, db: InfrahubDatabase, branch: Branch) -> DiffCoordinator: + component_registry = get_component_registry() + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=branch) + diff_coordinator.data_check_synchronizer = AsyncMock(spec=DiffDataCheckSynchronizer) + return diff_coordinator + + async def _get_diff_merger(self, db: InfrahubDatabase, branch: Branch) -> DiffMerger: + component_registry = get_component_registry() + return await component_registry.get_component(DiffMerger, db=db, branch=branch) + + async def test_diff_and_merge_with_list_attribute( + self, db: InfrahubDatabase, default_branch: Branch, all_attribute_types_schema: NodeSchema + ): + new_node = await Node.init(db=db, schema=all_attribute_types_schema.kind) + await new_node.new(db=db, mylist=["a", "b", 1, 2]) + await new_node.save(db=db) + branch2 = await create_branch(db=db, branch_name="branch2") + 
branch_node = await NodeManager.get_one(db=db, branch=branch2, id=new_node.id) + branch_node.mylist.value = ["c", "d", 3, 4] + await branch_node.save(db=db) + diff_coordinator = await self._get_diff_coordinator(db=db, branch=branch2) + await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch2) + diff_merger = await self._get_diff_merger(db=db, branch=branch2) + await diff_merger.merge_graph(at=Timestamp()) + + updated_node = await NodeManager.get_one(db=db, branch=default_branch, id=new_node.id) + assert updated_node.mylist.value == ["c", "d", 3, 4] + + async def test_diff_and_merge_schema_with_default_values( + self, db: InfrahubDatabase, default_branch: Branch, register_core_models_schema, car_person_schema: SchemaBranch + ): + schema_main = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.update_schema_branch( + db=db, branch=default_branch, schema=schema_main, limit=["TestCar", "TestPerson"], update_db=True + ) + branch2 = await create_branch(db=db, branch_name="branch2") + schema_branch = registry.schema.get_schema_branch(name=branch2.name) + schema_branch.duplicate() + car_schema_branch = schema_branch.get(name="TestCar") + car_schema_branch.attributes.append(AttributeSchema(name="num_cupholders", kind="Number", default_value=15)) + car_schema_branch.attributes.append(AttributeSchema(name="is_cool", kind="Boolean", default_value=False)) + car_schema_branch.attributes.append(AttributeSchema(name="nickname", kind="Text", default_value="car")) + schema_branch.set(name="TestCar", schema=car_schema_branch) + schema_branch.process() + await registry.schema.update_schema_branch( + db=db, branch=branch2, schema=schema_branch, limit=["TestCar", "TestPerson"], update_db=True + ) + + diff_coordinator = await self._get_diff_coordinator(db=db, branch=branch2) + await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch2) + diff_merger = await self._get_diff_merger(db=db, 
branch=branch2) + await diff_merger.merge_graph(at=Timestamp()) + + updated_schema = await registry.schema.load_schema_from_db(db=db, branch=default_branch) + car_schema_main = updated_schema.get(name="TestCar", duplicate=False) + new_int_attr = car_schema_main.get_attribute(name="num_cupholders") + assert new_int_attr.default_value == 15 + new_bool_attr = car_schema_main.get_attribute(name="is_cool") + assert new_bool_attr.default_value is False + new_str_attr = car_schema_main.get_attribute(name="nickname") + assert new_str_attr.default_value == "car" + + @pytest.mark.parametrize( + "conflict_selection,expected_value", + [(ConflictSelection.BASE_BRANCH, "John-main"), (ConflictSelection.DIFF_BRANCH, "John-branch")], + ) + async def test_diff_and_merge_with_attribute_value_conflict( + self, + db: InfrahubDatabase, + default_branch: Branch, + diff_repository: DiffRepository, + person_john_main, + person_jane_main, + person_alfred_main, + car_accord_main, + conflict_selection, + expected_value, + ): + branch2 = await create_branch(db=db, branch_name="branch2") + john_main = await NodeManager.get_one(db=db, id=person_john_main.id) + john_main.name.value = "John-main" + await john_main.save(db=db) + john_branch = await NodeManager.get_one(db=db, branch=branch2, id=person_john_main.id) + john_branch.name.value = "John-branch" + await john_branch.save(db=db) + + diff_coordinator = await self._get_diff_coordinator(db=db, branch=branch2) + enriched_diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch2) + conflicts_map = enriched_diff.get_all_conflicts() + assert len(conflicts_map) == 1 + conflict = next(iter(conflicts_map.values())) + await diff_repository.update_conflict_by_id(conflict_id=conflict.uuid, selection=conflict_selection) + diff_merger = await self._get_diff_merger(db=db, branch=branch2) + await diff_merger.merge_graph(at=Timestamp()) + + updated_john = await NodeManager.get_one(db=db, id=person_john_main.id) + assert 
updated_john.name.value == expected_value + + @pytest.mark.parametrize( + "conflict_selection", + [ConflictSelection.BASE_BRANCH, ConflictSelection.DIFF_BRANCH], + ) + async def test_diff_and_merge_with_relationship_conflict( + self, + db: InfrahubDatabase, + default_branch: Branch, + diff_repository: DiffRepository, + person_john_main, + person_jane_main, + person_alfred_main, + car_accord_main, + car_camry_main, + conflict_selection, + ): + branch2 = await create_branch(db=db, branch_name="branch2") + car_main = await NodeManager.get_one(db=db, id=car_accord_main.id) + await car_main.owner.update(db=db, data=person_alfred_main) + await car_main.save(db=db) + car_branch = await NodeManager.get_one(db=db, branch=branch2, id=car_accord_main.id) + await car_branch.owner.update(db=db, data=person_jane_main) + await car_branch.save(db=db) + + diff_coordinator = await self._get_diff_coordinator(db=db, branch=branch2) + enriched_diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch2) + conflicts_map = enriched_diff.get_all_conflicts() + assert len(conflicts_map) == 1 + conflict = next(iter(conflicts_map.values())) + await diff_repository.update_conflict_by_id(conflict_id=conflict.uuid, selection=conflict_selection) + diff_merger = await self._get_diff_merger(db=db, branch=branch2) + await diff_merger.merge_graph(at=Timestamp()) + + updated_car = await NodeManager.get_one(db=db, id=car_accord_main.id) + owner_rel = await updated_car.owner.get(db=db) + if conflict_selection is ConflictSelection.BASE_BRANCH: + assert owner_rel.peer_id == person_alfred_main.id + if conflict_selection is ConflictSelection.DIFF_BRANCH: + assert owner_rel.peer_id == person_jane_main.id + + @pytest.mark.parametrize( + "conflict_selection", + [ConflictSelection.BASE_BRANCH, ConflictSelection.DIFF_BRANCH], + ) + async def test_diff_and_merge_with_attribute_property_conflict( + self, + db: InfrahubDatabase, + default_branch: Branch, + diff_repository: 
DiffRepository, + person_john_main, + person_jane_main, + person_alfred_main, + car_accord_main, + conflict_selection, + ): + branch2 = await create_branch(db=db, branch_name="branch2") + john_main = await NodeManager.get_one(db=db, id=person_john_main.id) + john_main.name.source = person_alfred_main + await john_main.save(db=db) + john_branch = await NodeManager.get_one(db=db, branch=branch2, id=person_john_main.id) + john_branch.name.source = person_jane_main + await john_branch.save(db=db) + + diff_coordinator = await self._get_diff_coordinator(db=db, branch=branch2) + enriched_diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch2) + conflicts_map = enriched_diff.get_all_conflicts() + assert len(conflicts_map) == 1 + conflict = next(iter(conflicts_map.values())) + await diff_repository.update_conflict_by_id(conflict_id=conflict.uuid, selection=conflict_selection) + diff_merger = await self._get_diff_merger(db=db, branch=branch2) + await diff_merger.merge_graph(at=Timestamp()) + + updated_john = await NodeManager.get_one(db=db, id=person_john_main.id, include_source=True) + + attr_source = await updated_john.name.get_source(db=db) + if conflict_selection is ConflictSelection.BASE_BRANCH: + assert attr_source.id == person_alfred_main.id + if conflict_selection is ConflictSelection.DIFF_BRANCH: + assert attr_source.id == person_jane_main.id + + @pytest.mark.parametrize( + "conflict_selection", + [ConflictSelection.BASE_BRANCH, ConflictSelection.DIFF_BRANCH], + ) + async def test_diff_and_merge_with_relationship_property_conflict( + self, + db: InfrahubDatabase, + default_branch: Branch, + diff_repository: DiffRepository, + person_john_main, + person_jane_main, + person_alfred_main, + car_accord_main, + car_camry_main, + conflict_selection, + ): + branch2 = await create_branch(db=db, branch_name="branch2") + car_main = await NodeManager.get_one(db=db, id=car_accord_main.id) + await car_main.owner.update(db=db, data={"id": 
person_john_main.id, "_relation__owner": person_alfred_main.id}) + await car_main.save(db=db) + car_branch = await NodeManager.get_one(db=db, branch=branch2, id=car_accord_main.id) + await car_branch.owner.update(db=db, data={"id": person_john_main.id, "_relation__owner": person_jane_main.id}) + await car_branch.save(db=db) + + diff_coordinator = await self._get_diff_coordinator(db=db, branch=branch2) + enriched_diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch2) + conflicts_map = enriched_diff.get_all_conflicts() + # conflict on both sides of the relationship + assert len(conflicts_map) == 2 + for conflict in conflicts_map.values(): + await diff_repository.update_conflict_by_id(conflict_id=conflict.uuid, selection=conflict_selection) + diff_merger = await self._get_diff_merger(db=db, branch=branch2) + await diff_merger.merge_graph(at=Timestamp()) + + updated_car = await NodeManager.get_one(db=db, id=car_accord_main.id, include_owner=True) + owner_rel = await updated_car.owner.get(db=db) + owner_prop = await owner_rel.get_owner(db=db) + if conflict_selection is ConflictSelection.BASE_BRANCH: + assert owner_prop.id == person_alfred_main.id + if conflict_selection is ConflictSelection.DIFF_BRANCH: + assert owner_prop.id == person_jane_main.id diff --git a/backend/tests/unit/core/diff/test_diff_calculator.py b/backend/tests/unit/core/diff/test_diff_calculator.py index 01b083806d..affc26a07b 100644 --- a/backend/tests/unit/core/diff/test_diff_calculator.py +++ b/backend/tests/unit/core/diff/test_diff_calculator.py @@ -693,7 +693,7 @@ async def test_relationship_one_property_branch_update( single_relationship = single_relationships_by_peer_id[person_john_main.id] assert single_relationship.peer_id == person_john_main.id assert single_relationship.action is DiffAction.REMOVED - assert len(single_relationship.properties) == 3 + assert len(single_relationship.properties) == 2 assert before_main_change < 
single_relationship.changed_at < after_main_change property_diff_by_type = {p.property_type: p for p in single_relationship.properties} property_diff = property_diff_by_type[DatabaseEdgeType.IS_RELATED] @@ -708,12 +708,6 @@ async def test_relationship_one_property_branch_update( assert property_diff.new_value is None assert property_diff.action is DiffAction.REMOVED assert before_main_change < property_diff.changed_at < after_main_change - property_diff = property_diff_by_type[DatabaseEdgeType.IS_PROTECTED] - assert property_diff.property_type == DatabaseEdgeType.IS_PROTECTED - assert property_diff.previous_value is False - assert property_diff.new_value is None - assert property_diff.action is DiffAction.REMOVED - assert before_main_change < property_diff.changed_at < after_main_change async def test_add_node_branch( diff --git a/backend/tests/unit/core/diff/test_diff_payload.py b/backend/tests/unit/core/diff/test_diff_payload.py index fbf70a2d35..646c3fee00 100644 --- a/backend/tests/unit/core/diff/test_diff_payload.py +++ b/backend/tests/unit/core/diff/test_diff_payload.py @@ -114,8 +114,8 @@ async def test_diff_payload_two_updates_one_relationship(db: InfrahubDatabase, p assert owner_element.type is DiffElementType.RELATIONSHIP_ONE assert owner_element.name == "owner" assert owner_element.branch == branch.name - assert owner_element.action is DiffAction.UPDATED - assert owner_element.peer.previous.id == person_albert_main.id + assert owner_element.action is DiffAction.ADDED + assert owner_element.peer.previous is None assert owner_element.peer.new.id == person_alfred_main.id @@ -159,8 +159,8 @@ async def test_diff_payload_three_updates_one_relationship( assert owner_element.type is DiffElementType.RELATIONSHIP_ONE assert owner_element.name == "owner" assert owner_element.branch == branch.name - assert owner_element.action is DiffAction.UPDATED - assert owner_element.peer.previous.id == person_jane_main.id + assert owner_element.action is DiffAction.ADDED + 
assert owner_element.peer.previous is None assert owner_element.peer.new.id == person_alfred_main.id diff --git a/backend/tests/unit/core/diff/test_diff_repository.py b/backend/tests/unit/core/diff/test_diff_repository.py index 92fc7bce7d..d96d8e2a80 100644 --- a/backend/tests/unit/core/diff/test_diff_repository.py +++ b/backend/tests/unit/core/diff/test_diff_repository.py @@ -15,6 +15,7 @@ EnrichedDiffRoot, EnrichedDiffs, NameTrackingId, + NodeDiffFieldSummary, ) from infrahub.core.diff.repository.deserializer import EnrichedDiffDeserializer from infrahub.core.diff.repository.repository import DiffRepository @@ -43,7 +44,7 @@ def diff_repository(self, db: InfrahubDatabase) -> DiffRepository: config.SETTINGS.database.max_depth_search_hierarchy = 10 return DiffRepository(db=db, deserializer=EnrichedDiffDeserializer()) - def build_diff_node(self, num_sub_fields=2) -> EnrichedDiffNode: + def build_diff_node(self, num_sub_fields=2, no_recurse=False) -> EnrichedDiffNode: enriched_node = EnrichedNodeFactory.build( attributes={ EnrichedAttributeFactory.build( @@ -70,6 +71,8 @@ def build_diff_node(self, num_sub_fields=2) -> EnrichedDiffNode: for _ in range(num_sub_fields) }, ) + if no_recurse: + return enriched_node if num_sub_fields > 1 and len(enriched_node.relationships) > 0: for relationship_group in enriched_node.relationships: relationship_group.nodes = { @@ -455,167 +458,6 @@ async def test_filter_root_node_uuids(self, diff_repository: DiffRepository, res assert len(retrieved) == 1 assert retrieved[0] == replace(this_diff, nodes={parent_node, thin_middle_node, expected_leaf_node}) - # async def test_filter_limit_and_offset_flat(self, diff_repository: DiffRepository, reset_database): - # ordered_nodes = [] - # for kind, label in (("A", "a"), ("A", "b"), ("B", "a"), ("B", "b")): - # ordered_nodes.append(EnrichedNodeFactory.build(kind=kind, label=label, relationships=set())) - # enriched_diff = EnrichedRootFactory.build( - # base_branch_name=self.base_branch_name, - # 
diff_branch_name=self.diff_branch_name, - # from_time=Timestamp(self.diff_from_time), - # to_time=Timestamp(self.diff_to_time), - # nodes=set(ordered_nodes), - # ) - # await diff_repository.save(enriched_diff=enriched_diff) - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # from_time=Timestamp(self.diff_from_time), - # to_time=Timestamp(self.diff_to_time), - # limit=2, - # ) - # assert len(retrieved) == 1 - # assert retrieved[0].nodes == set(ordered_nodes[:2]) - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # from_time=Timestamp(self.diff_from_time), - # to_time=Timestamp(self.diff_to_time), - # limit=2, - # offset=2, - # ) - # assert len(retrieved) == 1 - # assert retrieved[0].nodes == set(ordered_nodes[2:]) - - # async def test_filter_limit_and_offset_with_nested_nodes(self, diff_repository: DiffRepository, reset_database): - # nodes = self._build_nodes(num_nodes=10, num_sub_fields=3) - # enriched_diff = EnrichedRootFactory.build( - # base_branch_name=self.base_branch_name, - # diff_branch_name=self.diff_branch_name, - # from_time=Timestamp(self.diff_from_time), - # to_time=Timestamp(self.diff_to_time), - # nodes=nodes, - # ) - # root_nodes = enriched_diff.get_nodes_without_parents() - # ordered_nodes = list(root_nodes) - # kinds = sorted(random.sample(string.ascii_uppercase, k=5)) - # for i in range(5): - # kind = kinds[i] - # labels = sorted(random.sample(string.ascii_lowercase, k=2)) - # ordered_nodes[2 * i].kind = kind - # ordered_nodes[2 * i + 1].kind = kind - # ordered_nodes[2 * i].label = labels[0] - # ordered_nodes[2 * i + 1].label = labels[1] - # await diff_repository.save(enriched_diff=enriched_diff) - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # 
from_time=Timestamp(self.diff_from_time), - # to_time=Timestamp(self.diff_to_time), - # limit=2, - # ) - # expected_root_nodes = set(ordered_nodes[:2]) - # all_expected_nodes = set(expected_root_nodes) - # for n in expected_root_nodes: - # all_expected_nodes |= n.get_all_child_nodes() - # assert len(retrieved) == 1 - # assert retrieved[0].get_nodes_without_parents() == expected_root_nodes - # assert retrieved[0].nodes == all_expected_nodes - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # from_time=Timestamp(self.diff_from_time), - # to_time=Timestamp(self.diff_to_time), - # limit=4, - # offset=2, - # ) - # expected_root_nodes = set(ordered_nodes[2:6]) - # all_expected_nodes = set(expected_root_nodes) - # for n in expected_root_nodes: - # all_expected_nodes |= n.get_all_child_nodes() - # assert len(retrieved) == 1 - # assert retrieved[0].get_nodes_without_parents() == set(ordered_nodes[2:6]) - # assert retrieved[0].nodes == all_expected_nodes - - # async def test_filter_limit_and_offset_across_multiple_roots(self, diff_repository: DiffRepository, reset_database): - # enriched_diffs = [] - # node_uuids = [str(uuid4()) for _ in range(3)] - # first_nodes = [] - # second_nodes = [] - # third_nodes = [] - # start_time = self.diff_from_time.add(minutes=1) - # for i in range(3): - # nodes = self._build_nodes(num_nodes=3, num_sub_fields=2) - # enriched_diff = EnrichedRootFactory.build( - # base_branch_name=self.base_branch_name, - # diff_branch_name=self.diff_branch_name, - # from_time=Timestamp(start_time.add(minutes=i * 30)), - # to_time=Timestamp(start_time.add(minutes=(i * 30) + 29)), - # nodes=nodes, - # ) - # enriched_diffs.append(enriched_diff) - # root_nodes = enriched_diff.get_nodes_without_parents() - # ordered_nodes = list(root_nodes) - # first_node, second_node, third_node = ordered_nodes - # first_node.kind = "A" - # first_node.label = "a" - # first_node.uuid = 
node_uuids[0] - # first_nodes.append(first_node) - # second_node.kind = "B" - # second_node.label = "b" - # second_node.uuid = node_uuids[1] - # second_nodes.append(second_node) - # third_node.kind = "C" - # third_node.label = "c" - # third_node.uuid = node_uuids[2] - # third_nodes.append(third_node) - # await diff_repository.save(enriched_diff=enriched_diff) - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # from_time=Timestamp(start_time), - # to_time=Timestamp(start_time.add(minutes=100)), - # limit=1, - # ) - # assert len(retrieved) == 3 - # for index, retrieved_root in enumerate(retrieved): - # root_nodes = retrieved_root.get_nodes_without_parents() - # assert len(root_nodes) == 1 - # assert root_nodes == {first_nodes[index]} - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # from_time=Timestamp(start_time), - # to_time=Timestamp(start_time.add(minutes=100)), - # limit=1, - # offset=1, - # ) - # assert len(retrieved) == 3 - # for index, retrieved_root in enumerate(retrieved): - # root_nodes = retrieved_root.get_nodes_without_parents() - # assert len(root_nodes) == 1 - # assert root_nodes == {second_nodes[index]} - - # retrieved = await diff_repository.get( - # base_branch_name=self.base_branch_name, - # diff_branch_names=[self.diff_branch_name], - # from_time=Timestamp(start_time), - # to_time=Timestamp(start_time.add(minutes=100)), - # limit=1, - # offset=2, - # ) - # assert len(retrieved) == 3 - # for index, retrieved_root in enumerate(retrieved): - # root_nodes = retrieved_root.get_nodes_without_parents() - # assert len(root_nodes) == 1 - # assert root_nodes == {third_nodes[index]} - async def test_save_and_retrieve_many_diffs(self, diff_repository: DiffRepository, reset_database): diffs_to_retrieve: list[EnrichedDiffRoot] = [] start_time = self.diff_from_time.add(seconds=1) @@ 
-729,3 +571,46 @@ async def test_get_by_tracking_id(self, diff_repository: DiffRepository, reset_d tracking_id=BranchTrackingId(name="not a branch"), diff_branch_name=self.diff_branch_name, ) + + async def test_get_node_field_summaries(self, diff_repository: DiffRepository): + diff_nodes = self._build_nodes(num_nodes=5, num_sub_fields=2) + for diff_node in list(diff_nodes)[:3]: + same_kind_diff_node = self.build_diff_node(num_sub_fields=3, no_recurse=True) + same_kind_diff_node.kind = diff_node.kind + same_attr_names = random.sample([a.name for a in diff_node.attributes], k=min(len(diff_node.attributes), 2)) + for attr_diff, attr_name in zip(list(same_kind_diff_node.attributes)[:2], same_attr_names): + attr_diff.name = attr_name + same_rel_names = random.sample( + [r.name for r in diff_node.relationships], k=min(len(diff_node.relationships), 2) + ) + for rel_diff, rel_name in zip(list(same_kind_diff_node.relationships)[:2], same_rel_names): + rel_diff.name = rel_name + diff_nodes.add(same_kind_diff_node) + diff_root = EnrichedRootFactory.build(nodes=diff_nodes) + diff_root.tracking_id = BranchTrackingId(name=diff_root.diff_branch_name) + await self._save_single_diff(diff_repository=diff_repository, enriched_diff=diff_root) + + expected_map: dict[str, NodeDiffFieldSummary] = {} + for node in diff_root.nodes: + if node.action is DiffAction.UNCHANGED: + continue + if node.kind not in expected_map: + expected_map[node.kind] = NodeDiffFieldSummary(kind=node.kind) + field_summary = expected_map[node.kind] + attr_names = {a.name for a in node.attributes if a.action is not DiffAction.UNCHANGED} + field_summary.attribute_names.update(attr_names) + rel_names = {r.name for r in node.relationships if r.action is not DiffAction.UNCHANGED} + field_summary.relationship_names.update(rel_names) + expected_map = {k: v for k, v in expected_map.items() if v.relationship_names or v.attribute_names} + + retrieved_node_field_summaries = await diff_repository.get_node_field_summaries( + 
diff_branch_name=diff_root.diff_branch_name, tracking_id=diff_root.tracking_id + ) + retrieved_map = {summary.kind: summary for summary in retrieved_node_field_summaries} + assert expected_map == retrieved_map + + retrieved_node_field_summaries = await diff_repository.get_node_field_summaries( + diff_branch_name=diff_root.diff_branch_name, diff_id=diff_root.uuid + ) + retrieved_map = {summary.kind: summary for summary in retrieved_node_field_summaries} + assert expected_map == retrieved_map diff --git a/backend/tests/unit/core/migrations/schema/test_node_attribute_add.py b/backend/tests/unit/core/migrations/schema/test_node_attribute_add.py index 34ef2a0964..91addd687c 100644 --- a/backend/tests/unit/core/migrations/schema/test_node_attribute_add.py +++ b/backend/tests/unit/core/migrations/schema/test_node_attribute_add.py @@ -4,11 +4,17 @@ from infrahub_sdk import InfrahubClient from infrahub_sdk.uuidt import UUIDT -from infrahub.core.constants import SchemaPathType +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import HashableModelState, SchemaPathType from infrahub.core.migrations.schema.node_attribute_add import ( NodeAttributeAddMigration, NodeAttributeAddMigrationQuery01, ) +from infrahub.core.migrations.schema.node_attribute_remove import ( + NodeAttributeRemoveMigration, + NodeAttributeRemoveMigrationQuery01, +) from infrahub.core.path import SchemaPath from infrahub.core.schema import NodeSchema from infrahub.core.timestamp import Timestamp @@ -88,6 +94,54 @@ async def test_query01(db: InfrahubDatabase, default_branch, init_database, sche assert await count_nodes(db=db, label="Attribute") == 5 +async def test_query01_re_add(db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main): + schema = registry.schema.get_schema_branch(name=default_branch.name) + + assert await count_nodes(db=db, label="TestCar") == 2 + assert await count_nodes(db=db, label="Attribute") == 14 + + # 
------------------------------------------ + # Delete the attribute Color + # ------------------------------------------ + candidate_schema = schema.duplicate() + car_schema = candidate_schema.get_node(name="TestCar") + attr = car_schema.get_attribute(name="color") + attr.state = HashableModelState.ABSENT + + migration_remove = NodeAttributeRemoveMigration( + previous_node_schema=schema.get_node(name="TestCar"), + new_node_schema=car_schema, + schema_path=SchemaPath(path_type=SchemaPathType.ATTRIBUTE, schema_kind="TestCar", field_name="color"), + ) + query = await NodeAttributeRemoveMigrationQuery01.init(db=db, branch=default_branch, migration=migration_remove) + await query.execute(db=db) + assert query.get_nbr_migrations_executed() == 2 + + # ------------------------------------------ + # Add the attribute Color back + # ------------------------------------------ + migration_add = NodeAttributeAddMigration( + new_node_schema=schema.get_node(name="TestCar"), + previous_node_schema=car_schema, + schema_path=SchemaPath(path_type=SchemaPathType.ATTRIBUTE, schema_kind="TestCar", field_name="color"), + ) + query = await NodeAttributeAddMigrationQuery01.init(db=db, branch=default_branch, migration=migration_add) + await query.execute(db=db) + + assert query.get_nbr_migrations_executed() == 2 + + assert await count_nodes(db=db, label="TestCar") == 2 + assert await count_nodes(db=db, label="Attribute") == 16 + + # Re-execute the query once to ensure that it won't recreate the attribute twice + query = await NodeAttributeAddMigrationQuery01.init(db=db, branch=default_branch, migration=migration_add) + await query.execute(db=db) + + assert query.get_nbr_migrations_executed() == 0 + assert await count_nodes(db=db, label="TestCar") == 2 + assert await count_nodes(db=db, label="Attribute") == 16 + + async def test_migration(db: InfrahubDatabase, default_branch, init_database, schema_aware): node = schema_aware migration = NodeAttributeAddMigration( diff --git 
a/backend/tests/unit/core/resource_manager/test_number_pool.py b/backend/tests/unit/core/resource_manager/test_number_pool.py index 02ea6c0219..b05f28d9b2 100644 --- a/backend/tests/unit/core/resource_manager/test_number_pool.py +++ b/backend/tests/unit/core/resource_manager/test_number_pool.py @@ -24,3 +24,10 @@ async def test_allocate_from_number_pool(db: InfrahubDatabase, default_branch: B assert ticket1.ticket_id.value == 1 assert ticket2.ticket_id.value == 2 + + # If a resource is deleted the allocated number should be returned to the pool + await ticket2.delete(db=db) + recreated_ticket2 = await Node.init(db=db, schema=TICKET.kind) + await recreated_ticket2.new(db=db, title="ticket2", ticket_id={"from_pool": {"id": np1.id}}) + await recreated_ticket2.save(db=db) + assert recreated_ticket2.ticket_id.value == 2 diff --git a/backend/tests/unit/core/schema/__init__.py b/backend/tests/unit/core/schema/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/unit/core/schema/schema_branch/__init__.py b/backend/tests/unit/core/schema/schema_branch/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/unit/core/schema/schema_branch/test_schema_uniqueness_constraints.py b/backend/tests/unit/core/schema/schema_branch/test_schema_uniqueness_constraints.py new file mode 100644 index 0000000000..b44524a696 --- /dev/null +++ b/backend/tests/unit/core/schema/schema_branch/test_schema_uniqueness_constraints.py @@ -0,0 +1,73 @@ +import pytest + +from infrahub.core.schema import AttributeSchema, NodeSchema, SchemaRoot +from infrahub.core.schema.schema_branch import SchemaBranch + + +@pytest.mark.parametrize( + "schema_root,expected_error", + [ + pytest.param( + SchemaRoot( + nodes=[ + NodeSchema( + name="Person", + namespace="Testing", + uniqueness_constraints=[["first_name__value"]], + attributes=[ + AttributeSchema( + name="name", + kind="Text", + ), + AttributeSchema( + name="description", + kind="Text", + 
optional=True, + ), + ], + ), + ], + ), + "TestingPerson: Requested unique constraint not found within node. (`first_name__value`)", + id="missing_all", + ), + pytest.param( + SchemaRoot( + nodes=[ + NodeSchema( + name="Person", + namespace="Testing", + uniqueness_constraints=[ + ["first_name__value", "last_name__value"], + ["origin__value", "family__value"], + ], + attributes=[ + AttributeSchema( + name="first_name", + kind="Text", + ), + AttributeSchema( + name="last_name", + kind="Text", + ), + AttributeSchema( + name="origin", + kind="Text", + ), + ], + ), + ], + ), + "TestingPerson.uniqueness_constraints: family__value is invalid on schema TestingPerson", + id="missing_single", + ), + ], +) +async def test_schema_protected_generics(schema_root: SchemaRoot, expected_error: str): + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=schema_root) + + with pytest.raises(ValueError) as exc: + schema.process_validate() + + assert expected_error == str(exc.value) diff --git a/backend/tests/unit/core/schema_manager/test_manager_schema.py b/backend/tests/unit/core/schema_manager/test_manager_schema.py index 0179f7938e..1f534aa51e 100644 --- a/backend/tests/unit/core/schema_manager/test_manager_schema.py +++ b/backend/tests/unit/core/schema_manager/test_manager_schema.py @@ -11,6 +11,7 @@ from infrahub.core.constants import ( AllowOverrideType, BranchSupportType, + HashableModelState, InfrahubKind, RelationshipDeleteBehavior, RelationshipKind, @@ -367,6 +368,41 @@ async def test_schema_branch_add_groups(schema_all_in_one): assert std_group.get_relationship_or_none(name="subscriber_of_groups") is None +async def test_schema_branch_cleanup_inherited_elements(schema_all_in_one): + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_all_in_one)) + + schema.process_inheritance() + + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_all_in_one)) + schema.process() + + 
generic = schema.get(name="InfraGenericInterface") + attr1 = generic.get_attribute(name="mybool") + attr1.state = HashableModelState.ABSENT + rel1 = generic.get_relationship(name="primary_tag") + rel1.state = HashableModelState.ABSENT + schema.set(name=generic.kind, schema=generic) + + node = schema.get(name="BuiltinCriticality") + attr1_node = node.get_attribute(name="mybool") + assert attr1_node.inherited is True + assert attr1_node.state == HashableModelState.PRESENT + rel1_node = node.get_relationship(name="primary_tag") + assert rel1_node.inherited is True + assert rel1_node.state == HashableModelState.PRESENT + + schema.cleanup_inherited_elements() + node = schema.get(name="BuiltinCriticality") + attr1_node = node.get_attribute(name="mybool") + assert attr1_node.inherited is True + assert attr1_node.state == HashableModelState.ABSENT + rel1_node = node.get_relationship(name="primary_tag") + assert rel1_node.inherited is True + assert rel1_node.state == HashableModelState.ABSENT + + @pytest.mark.parametrize( "schema_dict,expected_error", [ diff --git a/backend/tests/unit/core/test_attribute.py b/backend/tests/unit/core/test_attribute.py index fcb5ae8bf0..fe16fdab6b 100644 --- a/backend/tests/unit/core/test_attribute.py +++ b/backend/tests/unit/core/test_attribute.py @@ -12,6 +12,7 @@ Integer, IPHost, IPNetwork, + ListAttribute, MacAddress, String, ) @@ -702,3 +703,122 @@ async def test_attribute_size(db: InfrahubDatabase, default_branch: Branch, all_ # TextArea field should have no size limitation await obj.new(db=db, name="obj2", mytextarea=large_string) await obj.save(db=db) + + +@pytest.mark.parametrize("updated_status,expected_is_default", [(None, True), ("online", True), ("offline", False)]) +async def test_enum_with_default_preserves_is_default( + db: InfrahubDatabase, + default_branch: Branch, + hierarchical_location_data_simple: dict[str, Node], + updated_status, + expected_is_default, +): + site = hierarchical_location_data_simple["paris"] + rack = 
await Node.init(db=db, schema="LocationRack") + await rack.new(db=db, name="new-rack", parent=site) + await rack.save(db=db) + status_enum_by_value = { + "online": rack.status.value.ONLINE, + "offline": rack.status.value.OFFLINE, + } + assert rack.status.is_default + assert rack.status.value.value == "online" + + retrieved_rack = await NodeManager.get_one(db=db, id=rack.id) + assert retrieved_rack.status.value.value == "online" + assert retrieved_rack.status.is_default + retrieved_rack.name.value = "updated-rack" + expected_status = "online" + if updated_status: + retrieved_rack.status.value = status_enum_by_value[updated_status] + expected_status = updated_status + await retrieved_rack.save(db=db) + assert retrieved_rack.status.value.value == expected_status + assert retrieved_rack.status.is_default is expected_is_default + + updated_rack = await NodeManager.get_one(db=db, id=rack.id) + assert updated_rack.name.value == "updated-rack" + assert updated_rack.status.value.value == expected_status + assert updated_rack.status.is_default is expected_is_default + + +@pytest.mark.parametrize( + "regex_value,input_value,error", + [ + pytest.param( + "^box_", + ["mystring"], + "mystring must conform with the regex: '^box_' at test", + id="not-a-box", + ), + pytest.param( + "^box_", + ["box_a", "box_b", "chest_a"], + "chest_a must conform with the regex: '^box_' at test", + id="is-chest", + ), + ], +) +def test_attribute_list_invalid_regex( + default_branch: Branch, regex_value: str, input_value: list[str], error: str +) -> None: + storage_attribute = AttributeSchema(name="storage", kind="List", regex=regex_value) + widget = NodeSchema( + name="Widget", + namespace="Testing", + label="Widget", + attributes=[ + storage_attribute, + ], + ) + + with pytest.raises(ValidationError) as exc: + ListAttribute( + id=str(UUIDT()), + name="test", + schema=storage_attribute, + branch=default_branch, + at=Timestamp(), + node=Node(schema=widget, branch=default_branch, at=Timestamp()), + 
data=input_value, + ) + + assert error in str(exc.value) + + +@pytest.mark.parametrize( + "regex_value,input_value", + [ + pytest.param( + "^box_", + ["box_one"], + id="a-box", + ), + pytest.param( + "^box_", + ["box_a", "box_b", "box_another"], + id="several_boxes", + ), + ], +) +def test_attribute_list_regex(default_branch: Branch, regex_value: str, input_value: list[str]) -> None: + storage_attribute = AttributeSchema(name="storage", kind="List", regex=regex_value) + widget = NodeSchema( + name="Widget", + namespace="Testing", + label="Widget", + attributes=[ + storage_attribute, + ], + ) + + list_attrib = ListAttribute( + id=str(UUIDT()), + name="test", + schema=storage_attribute, + branch=default_branch, + at=Timestamp(), + node=Node(schema=widget, branch=default_branch, at=Timestamp()), + data=input_value, + ) + assert list_attrib.value == input_value diff --git a/backend/tests/unit/core/test_branch_merge.py b/backend/tests/unit/core/test_branch_merge.py index 692dfea9f6..c010def8d0 100644 --- a/backend/tests/unit/core/test_branch_merge.py +++ b/backend/tests/unit/core/test_branch_merge.py @@ -2,6 +2,7 @@ from infrahub.core.branch import Branch from infrahub.core.constants import InfrahubKind from infrahub.core.diff.coordinator import DiffCoordinator +from infrahub.core.diff.merger.merger import DiffMerger from infrahub.core.initialization import create_branch from infrahub.core.manager import NodeManager from infrahub.core.merge import BranchMerger @@ -17,7 +18,10 @@ async def test_validate_graph(db: InfrahubDatabase, base_dataset_02, register_core_models_schema): branch1 = await Branch.get_by_name(name="branch1", db=db) - merger = BranchMerger(db=db, source_branch=branch1) + component_registry = get_component_registry() + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=branch1) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=branch1) + merger = BranchMerger(db=db, 
diff_coordinator=diff_coordinator, diff_merger=diff_merger, source_branch=branch1) conflicts = await merger.validate_graph() assert not conflicts @@ -28,7 +32,7 @@ async def test_validate_graph(db: InfrahubDatabase, base_dataset_02, register_co c1.name.value = "new name" await c1.save(db=db) - merger = BranchMerger(db=db, source_branch=branch1) + merger = BranchMerger(db=db, diff_coordinator=diff_coordinator, diff_merger=diff_merger, source_branch=branch1) conflicts = await merger.validate_graph() assert conflicts @@ -38,7 +42,10 @@ async def test_validate_graph(db: InfrahubDatabase, base_dataset_02, register_co async def test_validate_empty_branch(db: InfrahubDatabase, base_dataset_02, register_core_models_schema): branch2 = await create_branch(branch_name="branch2", db=db) - merger = BranchMerger(db=db, source_branch=branch2) + component_registry = get_component_registry() + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=branch2) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=branch2) + merger = BranchMerger(db=db, diff_coordinator=diff_coordinator, diff_merger=diff_merger, source_branch=branch2) conflicts = await merger.validate_graph() assert not conflicts @@ -50,9 +57,9 @@ async def test_merge_graph(db: InfrahubDatabase, default_branch, base_dataset_02 at = Timestamp() component_registry = get_component_registry() diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=branch1) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=branch1) await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch1) - merger = BranchMerger(db=db, source_branch=branch1) - await merger.merge_graph(at=at) + await diff_merger.merge_graph(at=at) # Query all cars in MAIN, AFTER the merge cars = sorted(await NodeManager.query(schema="TestCar", db=db), key=lambda c: c.id) @@ -86,8 +93,7 @@ async def test_merge_graph(db: 
InfrahubDatabase, default_branch, base_dataset_02 assert cars[0].nbr_seats.value == 4 # It should be possible to merge a graph even without changes - merger = BranchMerger(db=db, source_branch=branch1) - await merger.merge_graph(at=at) + await diff_merger.merge_graph(at=at) async def test_merge_graph_delete(db: InfrahubDatabase, default_branch, base_dataset_02, register_core_models_schema): @@ -102,8 +108,8 @@ async def test_merge_graph_delete(db: InfrahubDatabase, default_branch, base_dat await p3.delete(db=db) await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch1) - merger = BranchMerger(db=db, source_branch=branch1) - await merger.merge_graph(at=Timestamp()) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=branch1) + await diff_merger.merge_graph(at=Timestamp()) # Query all cars in MAIN, AFTER the merge persons = sorted(await NodeManager.query(schema="TestPerson", db=db), key=lambda p: p.id) @@ -142,8 +148,8 @@ async def test_merge_relationship_many( await org1_main.save(db=db) await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=branch1) - merger = BranchMerger(db=db, source_branch=branch1) - await merger.merge_graph(at=Timestamp()) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=branch1) + await diff_merger.merge_graph(at=Timestamp()) org1_main = await NodeManager.get_one(id=org1.id, db=db) assert len(await org1_main.tags.get(db=db)) == 3 @@ -190,7 +196,16 @@ async def test_merge_update_schema( ) schema_branch = registry.schema.get_schema_branch(name=branch2.name) - merger = BranchMerger(db=db, source_branch=branch2, destination_branch=default_branch) + component_registry = get_component_registry() + diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=branch2) + diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=branch2) + merger = BranchMerger( + db=db, + 
diff_coordinator=diff_coordinator, + diff_merger=diff_merger, + source_branch=branch2, + destination_branch=default_branch, + ) assert await merger.update_schema() is True assert sorted(merger.migrations, key=lambda x: x.path.get_path()) == sorted( [ diff --git a/backend/tests/unit/core/test_branch_rebase.py b/backend/tests/unit/core/test_branch_rebase.py index a0fe595ded..9a7615cd9c 100644 --- a/backend/tests/unit/core/test_branch_rebase.py +++ b/backend/tests/unit/core/test_branch_rebase.py @@ -1,9 +1,13 @@ +import pytest + from infrahub.core.branch import Branch +from infrahub.core.branch.tasks import rebase_branch from infrahub.core.constants import InfrahubKind from infrahub.core.initialization import create_branch from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.database import InfrahubDatabase +from infrahub.exceptions import ValidationError async def test_rebase_graph(db: InfrahubDatabase, base_dataset_02, register_core_models_schema): @@ -79,3 +83,23 @@ async def test_merge_relationship_many( # All Relationship are in BRANCH1 after the REBASE org1_branch = await NodeManager.get_one(id=org1.id, branch=branch1, db=db) assert len(await org1_branch.tags.get(db=db)) == 3 + + +async def test_branch_rebase_diff_conflict( + db: InfrahubDatabase, + default_branch: Branch, + workflow_local, + init_service, + car_person_schema, + car_camry_main, +): + branch2 = await create_branch(db=db, branch_name="branch2") + car_main = await NodeManager.get_one(db=db, id=car_camry_main.id) + car_main.name.value += "-main" + await car_main.save(db=db) + car_branch = await NodeManager.get_one(db=db, branch=branch2, id=car_camry_main.id) + car_branch.name.value += "-branch" + await car_branch.save(db=db) + + with pytest.raises(ValidationError, match="contains conflicts with the default branch that must be addressed"): + await rebase_branch(branch=branch2.name) diff --git a/backend/tests/unit/core/test_node_get_list_query.py 
b/backend/tests/unit/core/test_node_get_list_query.py index e69c687e2c..7818a570ae 100644 --- a/backend/tests/unit/core/test_node_get_list_query.py +++ b/backend/tests/unit/core/test_node_get_list_query.py @@ -13,8 +13,10 @@ from infrahub.core.node import Node from infrahub.core.query.node import NodeGetListQuery from infrahub.core.registry import registry +from infrahub.core.schema import SchemaRoot from infrahub.core.schema.relationship_schema import RelationshipSchema from infrahub.database import InfrahubDatabase +from tests.helpers.schema import WIDGET async def test_query_NodeGetListQuery( @@ -818,3 +820,32 @@ async def test_query_NodeGetListQuery_multiple_profiles_same_priority_filter_and query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema) await query.execute(db=db) assert query.get_node_ids() == [car_camry_main.id, car_accord_main.id] + + +async def test_query_NodeGetListQuery_pagination_order_by( + db: InfrahubDatabase, default_branch: Branch, node_group_schema +): + """Validate that pagination works for nodes which have an order_by clause on non unique attributes.""" + schema_root = SchemaRoot(nodes=[WIDGET]) + + registry.schema.register_schema(schema=schema_root, branch=default_branch.name) + + widget_schema = registry.schema.get_node_schema("TestingWidget", branch=default_branch, duplicate=False) + + for i in range(20): + car_profile = await Node.init(db=db, schema=widget_schema, branch=default_branch) + await car_profile.new(db=db, name="top-widget", description=f"widget index {i}") + await car_profile.save(db=db) + + node_ids = set() + for offset in range(0, 19, 2): + query = await NodeGetListQuery.init(db=db, branch=default_branch, schema=widget_schema, limit=2, offset=offset) + await query.execute(db=db) + + result_ids = query.get_node_ids() + node_ids.update(result_ids) + + # If we don't get 20 results it means that the pagination is returning the same node multiple times + assert len(node_ids) == 20 + # Validate that the 
order_by clause hasn't changed on the test schema which would defeat the purpose of this test + assert widget_schema.order_by == ["name__value"] diff --git a/backend/tests/unit/core/test_relationships_rebase.py b/backend/tests/unit/core/test_relationships_rebase.py new file mode 100644 index 0000000000..3eb3c0f098 --- /dev/null +++ b/backend/tests/unit/core/test_relationships_rebase.py @@ -0,0 +1,356 @@ +from random import choice +from typing import Any, NamedTuple + +import pytest + +from infrahub.core.branch import Branch +from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.schema.schema_branch import SchemaBranch +from infrahub.core.timestamp import Timestamp +from infrahub.database import InfrahubDatabase + + +class DatabaseEdge(NamedTuple): + type: str + branch: str + status: str + from_time: Timestamp + to_time: Timestamp | None + + +class DatabasePath(NamedTuple): + uuid: str + peer_or_value: Any + edge_1: DatabaseEdge + edge_2: DatabaseEdge + + +async def get_database_edges_state( + db: InfrahubDatabase, + node_uuids: list[str], + rel_identiers: list[str], +) -> set[DatabasePath]: + query = """ + MATCH (n:Node)-[r1:IS_RELATED]-(r:Relationship)-[r2]-(peer) + WHERE n.uuid in $node_uuids + AND r.name in $rel_identifiers + AND r1.branch = r2.branch + RETURN n, r1, r2, peer + """ + results = await db.execute_query(query=query, params={"node_uuids": node_uuids, "rel_identifiers": rel_identiers}) + retrieved_path_tuples = set() + for result in results: + node_uuid = result.get("n").get("uuid") + r1 = result.get("r1") + r2 = result.get("r2") + edges_tuples = [] + for edge in [r1, r2]: + to_time_str = r1.get("to") + to_time = Timestamp(to_time_str) if to_time_str else None + edges_tuples.append( + DatabaseEdge( + type=edge.type, + branch=edge.get("branch"), + status=edge.get("status"), + from_time=Timestamp(edge.get("from")), + to_time=to_time, + ) + ) + peer = 
result.get("peer") + peer_id_or_value = peer.get("uuid", peer.get("value")) + retrieved_path_tuples.add( + DatabasePath(uuid=node_uuid, edge_1=edges_tuples[0], edge_2=edges_tuples[1], peer_or_value=peer_id_or_value) + ) + return retrieved_path_tuples + + +class TestRelationshipsWithRebase: + async def verify_database_state_cardinality_one( + self, + db: InfrahubDatabase, + car_uuid: str, + main_peer_id: str, + branch_peer_id: str, + branch_name: str, + rebase_time: Timestamp, + ): + database_paths = await get_database_edges_state( + db=db, node_uuids=[car_uuid], rel_identiers=["testcar__testperson"] + ) + # node_uuid, (type, branch, status, from_time == rebased_time, to_time == rebased_time) * 2, uuid OR value + retrieved_path_tuples = { + ( + db_path.uuid, + ( + db_path.edge_1.type, + db_path.edge_1.branch, + db_path.edge_1.status, + db_path.edge_1.from_time == rebase_time, + db_path.edge_1.to_time == rebase_time, + ), + ( + db_path.edge_2.type, + db_path.edge_2.branch, + db_path.edge_2.status, + db_path.edge_2.from_time == rebase_time, + db_path.edge_2.to_time == rebase_time, + ), + db_path.peer_or_value, + ) + for db_path in database_paths + } + + expected_path_tuples = { + ( + car_uuid, + ("IS_RELATED", "main", "active", False, False), + ("IS_RELATED", "main", "active", False, False), + main_peer_id, + ), + ( + car_uuid, + ("IS_RELATED", "main", "active", False, False), + ("IS_VISIBLE", "main", "active", False, False), + True, + ), + ( + car_uuid, + ("IS_RELATED", "main", "active", False, False), + ("IS_PROTECTED", "main", "active", False, False), + False, + ), + ( + car_uuid, + ("IS_RELATED", branch_name, "deleted", True, False), + ("IS_RELATED", branch_name, "deleted", True, False), + main_peer_id, + ), + ( + car_uuid, + ("IS_RELATED", branch_name, "deleted", True, False), + ("IS_VISIBLE", branch_name, "deleted", True, False), + True, + ), + ( + car_uuid, + ("IS_RELATED", branch_name, "deleted", True, False), + ("IS_PROTECTED", branch_name, "deleted", True, 
False), + False, + ), + ( + car_uuid, + ("IS_RELATED", branch_name, "active", True, False), + ("IS_RELATED", branch_name, "active", True, False), + branch_peer_id, + ), + ( + car_uuid, + ("IS_RELATED", branch_name, "active", True, False), + ("IS_VISIBLE", branch_name, "active", True, False), + True, + ), + ( + car_uuid, + ("IS_RELATED", branch_name, "active", True, False), + ("IS_PROTECTED", branch_name, "active", True, False), + False, + ), + } + + assert expected_path_tuples == retrieved_path_tuples + + @pytest.mark.parametrize("num_updates", [2, 3]) + async def test_rebase_cardinality_one( + self, + db: InfrahubDatabase, + default_branch: Branch, + car_person_schema: SchemaBranch, + person_john_main: Node, + person_alfred_main: Node, + person_jane_main: Node, + person_jim_main: Node, + car_accord_main: Node, + num_updates: int, + ): + branch_2 = await create_branch(db=db, branch_name="branch_2") + car_branch = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch_2) + await car_branch.owner.update(db=db, data=person_alfred_main) + await car_branch.save(db=db) + if num_updates > 2: + car_branch = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch_2) + await car_branch.owner.update(db=db, data=person_jim_main) + await car_branch.save(db=db) + car_branch = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch_2) + await car_branch.owner.update(db=db, data=person_jane_main) + await car_branch.save(db=db) + + rebase_time = Timestamp() + await branch_2.rebase(db=db, at=rebase_time) + + rebased_car = await NodeManager.get_one(db=db, branch=branch_2, id=car_branch.id) + owner_peer = await rebased_car.owner.get_peer(db=db) + assert owner_peer.id == person_jane_main.id + main_car = await NodeManager.get_one(db=db, id=car_branch.id) + owner_peer = await main_car.owner.get_peer(db=db) + assert owner_peer.id == person_john_main.id + await self.verify_database_state_cardinality_one( + db=db, + car_uuid=car_accord_main.id, + 
main_peer_id=person_john_main.id, + branch_peer_id=person_jane_main.id, + branch_name=branch_2.name, + rebase_time=rebase_time, + ) + + async def verify_database_state_cardinality_many( + self, + db: InfrahubDatabase, + person_uuids: list[str], + car_person_id_map_main: dict[str, str], + car_person_id_map_branch: dict[str, str], + branch_name: str, + rebase_time: Timestamp, + ): + database_paths = await get_database_edges_state( + db=db, node_uuids=person_uuids, rel_identiers=["testcar__testperson"] + ) + retrieved_path_tuples = { + ( + db_path.uuid, + ( + db_path.edge_1.type, + db_path.edge_1.branch, + db_path.edge_1.status, + db_path.edge_1.from_time == rebase_time, + db_path.edge_1.to_time == rebase_time, + ), + ( + db_path.edge_2.type, + db_path.edge_2.branch, + db_path.edge_2.status, + db_path.edge_2.from_time == rebase_time, + db_path.edge_2.to_time == rebase_time, + ), + db_path.peer_or_value, + ) + for db_path in database_paths + } + + expected_path_tuples = set() + for person_uuid in person_uuids: + expected_car_ids_main = {c_id for c_id, p_id in car_person_id_map_main.items() if p_id == person_uuid} + for car_id in expected_car_ids_main: + expected_path_tuples.update( + ( + person_uuid, + ("IS_RELATED", "main", "active", False, False), + (edge_type, "main", "active", False, False), + peer_or_value, + ) + for edge_type, peer_or_value in ( + ("IS_RELATED", car_id), + ("IS_VISIBLE", True), + ("IS_PROTECTED", False), + ) + ) + expected_path_tuples.update( + ( + person_uuid, + ("IS_RELATED", branch_name, "deleted", True, False), + (edge_type, branch_name, "deleted", True, False), + peer_or_value, + ) + for edge_type, peer_or_value in ( + ("IS_RELATED", car_id), + ("IS_VISIBLE", True), + ("IS_PROTECTED", False), + ) + ) + + expected_car_ids_branch = {c_id for c_id, p_id in car_person_id_map_branch.items() if p_id == person_uuid} + for car_id in expected_car_ids_branch: + expected_path_tuples.update( + ( + person_uuid, + ("IS_RELATED", branch_name, "active", 
True, False), + (edge_type, branch_name, "active", True, False), + peer_or_value, + ) + for edge_type, peer_or_value in ( + ("IS_RELATED", car_id), + ("IS_VISIBLE", True), + ("IS_PROTECTED", False), + ) + ) + + for ept in expected_path_tuples: + assert ept in retrieved_path_tuples + + assert expected_path_tuples == retrieved_path_tuples + + async def test_rebase_cardinality_many( + self, + db: InfrahubDatabase, + default_branch: Branch, + car_person_schema: SchemaBranch, + ): + people = [] + cars = [] + car_person_id_map_main = {} + for i in range(3): + person = await Node.init(db=db, schema="TestPerson") + await person.new(db=db, name=f"Person{i}") + await person.save(db=db) + people.append(person) + for j in range(2): + car = await Node.init(db=db, schema="TestCar") + await car.new(db=db, name=f"Car{i}{j}", owner=person) + await car.save(db=db) + cars.append(car) + car_person_id_map_main[car.id] = person.id + branch_2 = await create_branch(db=db, branch_name="branch_2") + # make a bunch of branch updates + car_person_id_map_branch = {} + for _ in range(3): + for car in cars: + branch_car = await NodeManager.get_one(db=db, branch=branch_2, id=car.id) + owner_peer = await branch_car.owner.get_peer(db=db) + random_person = choice([p for p in people if p.id != owner_peer.id]) + await branch_car.owner.update(db=db, data=random_person) + await branch_car.save(db=db) + car_person_id_map_branch[car.id] = random_person.id + + rebase_time = Timestamp() + await branch_2.rebase(db=db, at=rebase_time) + + for person in people: + main_person = await NodeManager.get_one(db=db, branch=default_branch, id=person.id) + expected_car_ids = {c_id for c_id, p_id in car_person_id_map_main.items() if p_id == person.id} + car_peers = await main_person.cars.get_peers(db=db) + retrieved_car_ids = set(car_peers.keys()) + assert expected_car_ids == retrieved_car_ids + for car in cars: + main_car = await NodeManager.get_one(db=db, branch=default_branch, id=car.id) + owner_peer = await 
main_car.owner.get_peer(db=db) + assert owner_peer.id == car_person_id_map_main[car.id] + + for person in people: + rebased_person = await NodeManager.get_one(db=db, branch=branch_2, id=person.id) + expected_car_ids = {c_id for c_id, p_id in car_person_id_map_branch.items() if p_id == person.id} + car_peers = await rebased_person.cars.get_peers(db=db) + retrieved_car_ids = set(car_peers.keys()) + assert expected_car_ids == retrieved_car_ids + for car in cars: + rebased_car = await NodeManager.get_one(db=db, branch=branch_2, id=car.id) + owner_peer = await rebased_car.owner.get_peer(db=db) + assert owner_peer.id == car_person_id_map_branch[car.id] + await self.verify_database_state_cardinality_many( + db=db, + person_uuids=[p.id for p in people], + car_person_id_map_main=car_person_id_map_main, + car_person_id_map_branch=car_person_id_map_branch, + branch_name=branch_2.name, + rebase_time=rebase_time, + ) diff --git a/backend/tests/unit/git/test_git_rpc.py b/backend/tests/unit/git/test_git_rpc.py index 0dc1e9c3d9..c4edf9831d 100644 --- a/backend/tests/unit/git/test_git_rpc.py +++ b/backend/tests/unit/git/test_git_rpc.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from unittest.mock import AsyncMock, patch from infrahub_sdk import Config, InfrahubClient @@ -34,9 +34,9 @@ async def __aenter__(self, *args: Any, **kwargs: Any) -> Self: async def __aexit__( self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: pass diff --git a/backend/tests/unit/graphql/auth/query_permission_checker/test_default_branch_checker.py b/backend/tests/unit/graphql/auth/query_permission_checker/test_default_branch_checker.py index f0ee11de1a..0069c46135 100644 --- 
a/backend/tests/unit/graphql/auth/query_permission_checker/test_default_branch_checker.py +++ b/backend/tests/unit/graphql/auth/query_permission_checker/test_default_branch_checker.py @@ -1,13 +1,13 @@ from __future__ import annotations from typing import TYPE_CHECKING -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from uuid import uuid4 import pytest from infrahub.auth import AccountSession, AuthType -from infrahub.core.constants import AccountRole, GlobalPermissions, InfrahubKind +from infrahub.core.constants import AccountRole, GlobalPermissions, InfrahubKind, PermissionDecision from infrahub.core.node import Node from infrahub.core.registry import registry from infrahub.exceptions import PermissionDeniedError @@ -39,7 +39,7 @@ async def test_setup( permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( - db=db, name=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value + db=db, action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value ) await permission.save(db=db) @@ -68,8 +68,9 @@ async def test_supports_default_branch_permission_accounts( self, user: AccountSession, db: InfrahubDatabase, permissions_helper: PermissionsHelper ): checker = DefaultBranchPermissionChecker() - is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) - assert is_supported == user.authenticated + with patch("infrahub.config.SETTINGS.main.allow_anonymous_access", False): + is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) + assert is_supported == user.authenticated @pytest.mark.parametrize( "contains_mutation,branch_name", diff --git a/backend/tests/unit/graphql/auth/query_permission_checker/test_merge_operation_checker.py b/backend/tests/unit/graphql/auth/query_permission_checker/test_merge_operation_checker.py index 
ed31289eb4..c2fe3079df 100644 --- a/backend/tests/unit/graphql/auth/query_permission_checker/test_merge_operation_checker.py +++ b/backend/tests/unit/graphql/auth/query_permission_checker/test_merge_operation_checker.py @@ -1,13 +1,13 @@ from __future__ import annotations from typing import TYPE_CHECKING -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch from uuid import uuid4 import pytest from infrahub.auth import AccountSession, AuthType -from infrahub.core.constants import AccountRole, GlobalPermissions, InfrahubKind +from infrahub.core.constants import AccountRole, GlobalPermissions, InfrahubKind, PermissionDecision from infrahub.core.node import Node from infrahub.core.registry import registry from infrahub.exceptions import PermissionDeniedError @@ -39,7 +39,7 @@ async def test_setup( permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( - db=db, name=GlobalPermissions.MERGE_BRANCH.value, action=GlobalPermissions.MERGE_BRANCH.value + db=db, action=GlobalPermissions.MERGE_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value ) await permission.save(db=db) @@ -68,8 +68,9 @@ async def test_supports_merge_branch_permission_accounts( self, user: AccountSession, db: InfrahubDatabase, permissions_helper: PermissionsHelper ): checker = MergeBranchPermissionChecker() - is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) - assert is_supported == user.authenticated + with patch("infrahub.config.SETTINGS.main.allow_anonymous_access", False): + is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) + assert is_supported == user.authenticated @pytest.mark.parametrize( "operation_name,checker_resolution", diff --git a/backend/tests/unit/graphql/auth/query_permission_checker/test_object_permission_checker.py 
b/backend/tests/unit/graphql/auth/query_permission_checker/test_object_permission_checker.py index 8ef865e5e0..7a6de5da22 100644 --- a/backend/tests/unit/graphql/auth/query_permission_checker/test_object_permission_checker.py +++ b/backend/tests/unit/graphql/auth/query_permission_checker/test_object_permission_checker.py @@ -1,6 +1,7 @@ from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import patch from uuid import uuid4 import pytest @@ -232,14 +233,12 @@ async def test_setup( permissions = [] for object_permission in [ ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.ALLOW_DEFAULT.value, ), ObjectPermission( - id="", namespace="Core", name="GraphQLQuery", action=PermissionAction.VIEW.value, @@ -376,7 +375,7 @@ async def test_setup( permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( - db=db, name=GlobalPermissions.MANAGE_ACCOUNTS.value, action=GlobalPermissions.MANAGE_ACCOUNTS.value + db=db, action=GlobalPermissions.MANAGE_ACCOUNTS.value, decision=PermissionDecision.ALLOW_ALL.value ) await permission.save(db=db) @@ -405,8 +404,9 @@ async def test_supports_manage_accounts_permission_accounts( self, user: AccountSession, db: InfrahubDatabase, permissions_helper: PermissionsHelper ): checker = AccountManagerPermissionChecker() - is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) - assert is_supported == user.authenticated + with patch("infrahub.config.SETTINGS.main.allow_anonymous_access", False): + is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) + assert is_supported == user.authenticated @pytest.mark.parametrize("operation", [MUTATION_ACCOUNT, MUTATION_ACCOUNT_GROUP, MUTATION_ACCOUNT_ROLE]) async def test_account_with_permission( @@ -491,7 +491,7 @@ async def test_setup( permission = await 
Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( - db=db, name=GlobalPermissions.MANAGE_PERMISSIONS.value, action=GlobalPermissions.MANAGE_PERMISSIONS.value + db=db, action=GlobalPermissions.MANAGE_PERMISSIONS.value, decision=PermissionDecision.ALLOW_ALL.value ) await permission.save(db=db) @@ -520,8 +520,9 @@ async def test_supports_manage_accounts_permission_accounts( self, user: AccountSession, db: InfrahubDatabase, permissions_helper: PermissionsHelper ): checker = PermissionManagerPermissionChecker() - is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) - assert is_supported == user.authenticated + with patch("infrahub.config.SETTINGS.main.allow_anonymous_access", False): + is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) + assert is_supported == user.authenticated @pytest.mark.parametrize( "operation", [MUTATION_GLOBAL_PERMISSION, MUTATION_OBJECT_PERMISSION, QUERY_ACCOUNT_PERMISSIONS] @@ -605,7 +606,7 @@ async def test_setup( permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( - db=db, name=GlobalPermissions.MANAGE_REPOSITORIES.value, action=GlobalPermissions.MANAGE_REPOSITORIES.value + db=db, action=GlobalPermissions.MANAGE_REPOSITORIES.value, decision=PermissionDecision.ALLOW_ALL.value ) await permission.save(db=db) @@ -634,8 +635,9 @@ async def test_supports_manage_repositories_permission_accounts( self, user: AccountSession, db: InfrahubDatabase, permissions_helper: PermissionsHelper ): checker = AccountManagerPermissionChecker() - is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) - assert is_supported == user.authenticated + with patch("infrahub.config.SETTINGS.main.allow_anonymous_access", False): + is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) + 
assert is_supported == user.authenticated @pytest.mark.parametrize( "operation", [MUTATION_REPOSITORY, MUTATION_READONLY_REPOSITORY, MUTATION_GENERIC_REPOSITORY] diff --git a/backend/tests/unit/graphql/auth/query_permission_checker/test_super_admin_checker.py b/backend/tests/unit/graphql/auth/query_permission_checker/test_super_admin_checker.py index e4f715f4a8..47a5eca97e 100644 --- a/backend/tests/unit/graphql/auth/query_permission_checker/test_super_admin_checker.py +++ b/backend/tests/unit/graphql/auth/query_permission_checker/test_super_admin_checker.py @@ -1,13 +1,13 @@ from __future__ import annotations from typing import TYPE_CHECKING -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from uuid import uuid4 import pytest from infrahub.auth import AccountSession, AuthType -from infrahub.core.constants import AccountRole, GlobalPermissions, InfrahubKind +from infrahub.core.constants import AccountRole, GlobalPermissions, InfrahubKind, PermissionDecision from infrahub.core.node import Node from infrahub.core.registry import registry from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer @@ -38,7 +38,7 @@ async def test_setup( permission = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) await permission.new( - db=db, name=GlobalPermissions.SUPER_ADMIN.value, action=GlobalPermissions.SUPER_ADMIN.value + db=db, action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value ) await permission.save(db=db) @@ -67,8 +67,9 @@ async def test_supports_super_admin_permission_accounts( self, user: AccountSession, db: InfrahubDatabase, permissions_helper: PermissionsHelper ): checker = SuperAdminPermissionChecker() - is_supported = await checker.supports(db=db, account_session=user, branch=permissions_helper.default_branch) - assert is_supported == user.authenticated + with patch("infrahub.config.SETTINGS.main.allow_anonymous_access", False): + is_supported = await checker.supports(db=db, 
account_session=user, branch=permissions_helper.default_branch) + assert is_supported == user.authenticated async def test_account_with_permission(self, db: InfrahubDatabase, permissions_helper: PermissionsHelper): checker = SuperAdminPermissionChecker() diff --git a/backend/tests/unit/graphql/auth/test_default_checker.py b/backend/tests/unit/graphql/auth/test_default_checker.py deleted file mode 100644 index d90d1e3f62..0000000000 --- a/backend/tests/unit/graphql/auth/test_default_checker.py +++ /dev/null @@ -1,37 +0,0 @@ -from unittest.mock import AsyncMock, MagicMock - -import pytest - -from infrahub.auth import AccountSession, AuthType -from infrahub.core.branch import Branch -from infrahub.core.constants import AccountRole -from infrahub.database import InfrahubDatabase -from infrahub.exceptions import AuthorizationError -from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer -from infrahub.graphql.auth.query_permission_checker.default_checker import DefaultGraphQLPermissionChecker -from infrahub.graphql.initialization import GraphqlParams - - -class TestDefaultAuthChecker: - def setup_method(self): - self.account_session = AccountSession(account_id="abc", auth_type=AuthType.JWT) - self.graphql_query = AsyncMock(spec=InfrahubGraphQLQueryAnalyzer) - self.checker = DefaultGraphQLPermissionChecker() - - @pytest.mark.parametrize("role", [x.value for x in AccountRole]) - async def test_supports_all_accounts(self, db: InfrahubDatabase, branch: Branch, role): - self.account_session.role = role - - is_supported = await self.checker.supports(db=db, account_session=self.account_session, branch=branch) - - assert is_supported is True - - async def test_always_raises_error(self, db: InfrahubDatabase, branch: Branch): - with pytest.raises(AuthorizationError): - await self.checker.check( - db=db, - account_session=self.account_session, - analyzed_query=self.graphql_query, - query_parameters=MagicMock(spec=GraphqlParams), - branch=branch, - ) diff --git 
a/backend/tests/unit/graphql/mutations/test_branch.py b/backend/tests/unit/graphql/mutations/test_branch.py index 11cd8a8f39..2b97a4c57b 100644 --- a/backend/tests/unit/graphql/mutations/test_branch.py +++ b/backend/tests/unit/graphql/mutations/test_branch.py @@ -5,6 +5,7 @@ from infrahub.core.branch import Branch from infrahub.core.constants import InfrahubKind from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.database import InfrahubDatabase from infrahub.graphql.initialization import prepare_graphql_params @@ -329,36 +330,6 @@ async def test_branch_create_with_repositories( assert await Branch.get_by_name(db=db, name="branch2") -async def test_branch_rebase(db: InfrahubDatabase, default_branch: Branch, car_person_schema, session_admin): - branch2 = await create_branch(db=db, branch_name="branch2") - - query = """ - mutation { - BranchRebase(data: { name: "branch2" }) { - ok - object { - id - } - } - } - """ - recorder = BusRecorder() - service = InfrahubServices(message_bus=recorder) - result = await graphql_mutation( - query=query, db=db, branch=default_branch, service=service, account_session=session_admin - ) - - assert result.errors is None - assert result.data - assert result.data["BranchRebase"]["ok"] is True - assert result.data["BranchRebase"]["object"]["id"] == str(branch2.uuid) - - new_branch2 = await Branch.get_by_name(db=db, name="branch2") - assert new_branch2.branched_from != branch2.branched_from - - assert recorder.seen_routing_keys == ["event.branch.rebased"] - - async def test_branch_rebase_wrong_branch( db: InfrahubDatabase, default_branch: Branch, car_person_schema, session_admin ): @@ -453,12 +424,14 @@ async def test_branch_update_description(db: InfrahubDatabase, base_dataset_02): assert branch4_updated.description == "testing" -async def test_branch_merge(db: InfrahubDatabase, base_dataset_02, register_core_models_schema, session_admin): 
+async def test_branch_merge_wrong_branch( + db: InfrahubDatabase, base_dataset_02, register_core_models_schema, session_admin +): branch1 = await Branch.get_by_name(db=db, name="branch1") query = """ mutation { - BranchMerge(data: { name: "branch1" }) { + BranchMerge(data: { name: "branch99" }) { ok object { id @@ -466,8 +439,10 @@ async def test_branch_merge(db: InfrahubDatabase, base_dataset_02, register_core } } """ + recorder = BusRecorder() + service = InfrahubServices(message_bus=recorder) gql_params = prepare_graphql_params( - db=db, include_subscription=False, branch=branch1, account_session=session_admin + db=db, include_subscription=False, branch=branch1, account_session=session_admin, service=service ) result = await graphql( schema=gql_params.schema, @@ -477,7 +452,44 @@ async def test_branch_merge(db: InfrahubDatabase, base_dataset_02, register_core variable_values={}, ) - assert result.errors is None - assert result.data - assert result.data["BranchMerge"]["ok"] is True - assert result.data["BranchMerge"]["object"]["id"] == str(branch1.uuid) + assert result.errors + assert len(result.errors) == 1 + assert result.errors[0].message == "Branch: branch99 not found." 
+ + +async def test_branch_merge_with_conflict_fails(db: InfrahubDatabase, car_person_schema, car_camry_main, session_admin): + query = """ + mutation { + BranchMerge(data: { name: "branch2" }) { + ok + object { + id + } + } + } + """ + + branch2 = await create_branch(db=db, branch_name="branch2") + car_main = await NodeManager.get_one(db=db, id=car_camry_main.id) + car_main.name.value += "-main" + await car_main.save(db=db) + car_branch = await NodeManager.get_one(db=db, branch=branch2, id=car_camry_main.id) + car_branch.name.value += "-branch" + await car_branch.save(db=db) + + recorder = BusRecorder() + service = InfrahubServices(message_bus=recorder) + gql_params = prepare_graphql_params( + db=db, include_subscription=False, branch=branch2, account_session=session_admin, service=service + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors + assert len(result.errors) == 1 + assert "contains conflicts with the default branch" in result.errors[0].message diff --git a/backend/tests/unit/graphql/mutations/test_proposed_change.py b/backend/tests/unit/graphql/mutations/test_proposed_change.py index c43ae7303c..d79793ceb5 100644 --- a/backend/tests/unit/graphql/mutations/test_proposed_change.py +++ b/backend/tests/unit/graphql/mutations/test_proposed_change.py @@ -221,7 +221,7 @@ async def test_merge_proposed_change_permission_failure( ) assert update_status.errors - assert "You do not have the permission to merge proposed changes" == update_status.errors[0].message + assert update_status.errors[0].message == "You do not have the permission to merge proposed changes" update_status = await graphql_mutation( query=UPDATE_PROPOSED_CHANGE, diff --git a/backend/tests/unit/graphql/queries/test_list_permissions.py b/backend/tests/unit/graphql/queries/test_list_permissions.py index 1590526bb5..f6f8a65dd9 100644 --- 
a/backend/tests/unit/graphql/queries/test_list_permissions.py +++ b/backend/tests/unit/graphql/queries/test_list_permissions.py @@ -12,8 +12,7 @@ from infrahub.core.node import Node from infrahub.core.registry import registry from infrahub.graphql.initialization import prepare_graphql_params -from infrahub.graphql.types.permission import PermissionDecision -from infrahub.permissions.constants import PermissionDecisionFlag +from infrahub.permissions.constants import BranchRelativePermissionDecision, PermissionDecisionFlag from infrahub.permissions.local_backend import LocalPermissionBackend if TYPE_CHECKING: @@ -104,35 +103,30 @@ async def test_setup( permissions = [] for object_permission in [ ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.VIEW.value, decision=PermissionDecisionFlag.ALLOW_ALL, ), ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.CREATE.value, decision=PermissionDecisionFlag.ALLOW_OTHER, ), ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.DELETE.value, decision=PermissionDecisionFlag.ALLOW_OTHER, ), ObjectPermission( - id="", namespace="Core", name="*", action=PermissionAction.ANY.value, decision=PermissionDecisionFlag.ALLOW_OTHER, ), ObjectPermission( - id="", namespace="Core", name="*", action=PermissionAction.VIEW.value, @@ -178,10 +172,10 @@ async def test_first_account_tags(self, db: InfrahubDatabase, permissions_helper assert result.data["BuiltinTag"]["permissions"]["edges"][0] == { "node": { "kind": "BuiltinTag", - "create": PermissionDecision.ALLOW_OTHER.name, - "update": PermissionDecision.DENY.name, - "delete": PermissionDecision.ALLOW_OTHER.name, - "view": PermissionDecision.ALLOW_ALL.name, + "create": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "update": BranchRelativePermissionDecision.DENY.name, + "delete": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "view": BranchRelativePermissionDecision.ALLOW.name, } } @@ -201,10 
+195,10 @@ async def test_first_account_tags_non_main_branch( assert result.data["BuiltinTag"]["permissions"]["edges"][0] == { "node": { "kind": "BuiltinTag", - "create": PermissionDecision.ALLOW_OTHER.name, - "update": PermissionDecision.DENY.name, - "delete": PermissionDecision.ALLOW_OTHER.name, - "view": PermissionDecision.ALLOW_ALL.name, + "create": BranchRelativePermissionDecision.ALLOW.name, + "update": BranchRelativePermissionDecision.DENY.name, + "delete": BranchRelativePermissionDecision.ALLOW.name, + "view": BranchRelativePermissionDecision.ALLOW.name, } } @@ -231,28 +225,28 @@ async def test_first_account_list_permissions_for_generics( assert { "node": { "kind": "CoreGenericRepository", - "create": PermissionDecision.ALLOW_OTHER.name, - "update": PermissionDecision.ALLOW_OTHER.name, - "delete": PermissionDecision.ALLOW_OTHER.name, - "view": PermissionDecision.ALLOW_ALL.name, + "create": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "update": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "delete": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "view": BranchRelativePermissionDecision.ALLOW.name, } } in result.data["CoreGenericRepository"]["permissions"]["edges"] assert { "node": { "kind": "CoreRepository", - "create": PermissionDecision.ALLOW_OTHER.name, - "update": PermissionDecision.ALLOW_OTHER.name, - "delete": PermissionDecision.ALLOW_OTHER.name, - "view": PermissionDecision.ALLOW_ALL.name, + "create": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "update": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "delete": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "view": BranchRelativePermissionDecision.ALLOW.name, } } in result.data["CoreGenericRepository"]["permissions"]["edges"] assert { "node": { "kind": "CoreReadOnlyRepository", - "create": PermissionDecision.ALLOW_OTHER.name, - "update": PermissionDecision.ALLOW_OTHER.name, - "delete": PermissionDecision.ALLOW_OTHER.name, - "view": 
PermissionDecision.ALLOW_ALL.name, + "create": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "update": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "delete": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "view": BranchRelativePermissionDecision.ALLOW.name, } } in result.data["CoreGenericRepository"]["permissions"]["edges"] @@ -275,10 +269,10 @@ async def test_first_account_account_role( assert result.data["CoreAccountRole"]["permissions"]["edges"][0] == { "node": { "kind": "CoreAccountRole", - "create": PermissionDecision.ALLOW_OTHER.name, - "update": PermissionDecision.ALLOW_OTHER.name, - "delete": PermissionDecision.ALLOW_OTHER.name, - "view": PermissionDecision.ALLOW_ALL.name, + "create": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "update": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "delete": BranchRelativePermissionDecision.ALLOW_OTHER.name, + "view": BranchRelativePermissionDecision.ALLOW.name, } } assert result.data["CoreAccountRole"]["edges"][0]["node"]["display_label"] == "admin" @@ -319,28 +313,24 @@ async def test_setup( permissions = [] for object_permission in [ ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.VIEW.value, decision=PermissionDecisionFlag.ALLOW_ALL, ), ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.CREATE.value, decision=PermissionDecisionFlag.ALLOW_ALL, ), ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.DELETE.value, decision=PermissionDecisionFlag.ALLOW_ALL, ), ObjectPermission( - id="", namespace="Builtin", name="*", action=PermissionAction.UPDATE.value, @@ -393,7 +383,7 @@ async def test_first_account_tags_main_branch( assert result.data assert result.data["BuiltinTag"]["count"] == 1 assert result.data["BuiltinTag"]["edges"][0]["node"]["name"]["permissions"] == { - "update_value": PermissionDecision.ALLOW_OTHER.name + "update_value": BranchRelativePermissionDecision.ALLOW_OTHER.name } async 
def test_first_account_tags_non_main_branch( @@ -415,5 +405,5 @@ async def test_first_account_tags_non_main_branch( assert result.data assert result.data["BuiltinTag"]["count"] == 1 assert result.data["BuiltinTag"]["edges"][0]["node"]["name"]["permissions"] == { - "update_value": PermissionDecision.ALLOW_OTHER.name + "update_value": BranchRelativePermissionDecision.ALLOW.name } diff --git a/backend/tests/unit/graphql/queries/test_resource_pool.py b/backend/tests/unit/graphql/queries/test_resource_pool.py index 82a398c295..dcafe219cf 100644 --- a/backend/tests/unit/graphql/queries/test_resource_pool.py +++ b/backend/tests/unit/graphql/queries/test_resource_pool.py @@ -633,6 +633,7 @@ async def test_number_pool_utilization(db: InfrahubDatabase, default_branch: Bra assert first.data assert second.data assert third.data + second_id = second.data["TestingTicketCreate"]["object"]["id"] utilization = await graphql( schema=gql_params.schema, @@ -670,6 +671,32 @@ async def test_number_pool_utilization(db: InfrahubDatabase, default_branch: Bra numbers = [entry["node"]["display_label"] for entry in allocation.data["InfrahubResourcePoolAllocated"]["edges"]] assert sorted(numbers) == ["1", "2", "3"] + remove_two = await graphql( + schema=gql_params.schema, + source=DELETE_TICKET, + context_value=gql_params.context, + root_value=None, + variable_values={"id": second_id}, + ) + assert not remove_two.errors + + allocation = await graphql( + schema=gql_params.schema, + source=POOL_ALLOCATION, + context_value=gql_params.context, + root_value=None, + variable_values={ + "pool_id": pool_id, + "resource_id": pool_id, + }, + ) + + assert not allocation.errors + assert allocation.data + assert allocation.data["InfrahubResourcePoolAllocated"]["count"] == 2 + numbers = [entry["node"]["display_label"] for entry in allocation.data["InfrahubResourcePoolAllocated"]["edges"]] + assert sorted(numbers) == ["1", "3"] + CREATE_NUMBER_POOL = """ mutation CreateNumberPool( @@ -720,6 +747,14 @@ async 
def test_number_pool_utilization(db: InfrahubDatabase, default_branch: Bra } """ +DELETE_TICKET = """ +mutation DeleteTicket($id: String!) { + TestingTicketDelete(data: {id: $id}) { + ok + } +} +""" + POOL_UTILIZATION = """ query PoolUtilization($pool_id: String!) { InfrahubResourcePoolUtilization(pool_id: $pool_id) { diff --git a/backend/tests/unit/graphql/test_core_account.py b/backend/tests/unit/graphql/test_core_account.py index 6bd79b9894..45072d1095 100644 --- a/backend/tests/unit/graphql/test_core_account.py +++ b/backend/tests/unit/graphql/test_core_account.py @@ -86,25 +86,14 @@ async def test_permissions( assert result.errors is None perms = [edge["node"]["identifier"] for edge in result.data["InfrahubPermissions"]["global_permissions"]["edges"]] assert perms == [ - str( - GlobalPermission( - id="", - name=GlobalPermissions.SUPER_ADMIN.value, - action=GlobalPermissions.SUPER_ADMIN.value, - decision=PermissionDecision.ALLOW_ALL.value, - ) - ) + str(GlobalPermission(action=GlobalPermissions.SUPER_ADMIN.value, decision=PermissionDecision.ALLOW_ALL.value)) ] perms = [edge["node"]["identifier"] for edge in result.data["InfrahubPermissions"]["object_permissions"]["edges"]] assert perms == [ str( ObjectPermission( - id="", - namespace="*", - name="*", - action=PermissionAction.ANY.value, - decision=PermissionDecision.ALLOW_ALL.value, + namespace="*", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.ALLOW_ALL.value ) ) ] diff --git a/backend/tests/unit/graphql/test_diff_tree_query.py b/backend/tests/unit/graphql/test_diff_tree_query.py index 0caa337b3b..a389e278af 100644 --- a/backend/tests/unit/graphql/test_diff_tree_query.py +++ b/backend/tests/unit/graphql/test_diff_tree_query.py @@ -292,7 +292,8 @@ async def test_diff_tree_one_attr_change( after_change_datetime = datetime.now(tz=UTC) enriched_diff = await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=diff_branch) - enriched_conflict = 
enriched_diff.get_all_conflicts()[0] + enriched_conflict_map = enriched_diff.get_all_conflicts() + enriched_conflict = list(enriched_conflict_map.values())[0] await diff_repository.update_conflict_by_id( conflict_id=enriched_conflict.uuid, selection=ConflictSelection.DIFF_BRANCH ) diff --git a/backend/tests/unit/graphql/test_graphql_partial_match.py b/backend/tests/unit/graphql/test_graphql_partial_match.py index e1a5931b19..2bbb7dfdb1 100644 --- a/backend/tests/unit/graphql/test_graphql_partial_match.py +++ b/backend/tests/unit/graphql/test_graphql_partial_match.py @@ -217,6 +217,7 @@ async def test_query_filter_local_attrs_partial_match_values( assert result.errors is None assert result.data assert len(result.data["TestCriticality"]["edges"]) == 2 - assert ["green", "grey"] == sorted( - [node["node"]["name"]["value"] for node in result.data["TestCriticality"]["edges"]] - ) + assert sorted([node["node"]["name"]["value"] for node in result.data["TestCriticality"]["edges"]]) == [ + "green", + "grey", + ] diff --git a/backend/tests/unit/graphql/test_graphql_utils.py b/backend/tests/unit/graphql/test_graphql_utils.py index a30ae10d01..d2eac517d6 100644 --- a/backend/tests/unit/graphql/test_graphql_utils.py +++ b/backend/tests/unit/graphql/test_graphql_utils.py @@ -1,11 +1,30 @@ -from graphql import parse +from graphql import GraphQLSchema, parse from infrahub_sdk.utils import extract_fields +from infrahub.core import registry from infrahub.core.branch import Branch from infrahub.core.constants import InfrahubKind from infrahub.database import InfrahubDatabase from infrahub.graphql.analyzer import extract_schema_models -from infrahub.graphql.initialization import generate_graphql_schema +from infrahub.graphql.manager import GraphQLSchemaManager + + +def generate_graphql_schema( + db: InfrahubDatabase, # pylint: disable=unused-argument + branch: Branch | str, + include_query: bool = True, + include_mutation: bool = True, + include_subscription: bool = True, + 
include_types: bool = True, +) -> GraphQLSchema: + branch = registry.get_branch_from_registry(branch) + schema = registry.schema.get_schema_branch(name=branch.name) + return GraphQLSchemaManager(schema=schema).generate( + include_query=include_query, + include_mutation=include_mutation, + include_subscription=include_subscription, + include_types=include_types, + ) async def test_schema_models(db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics, query_01: str): diff --git a/backend/tests/unit/graphql/test_manager.py b/backend/tests/unit/graphql/test_manager.py index 0657841991..6928414cad 100644 --- a/backend/tests/unit/graphql/test_manager.py +++ b/backend/tests/unit/graphql/test_manager.py @@ -1,9 +1,11 @@ import inspect import graphene +import pytest from infrahub.core import registry from infrahub.core.branch import Branch +from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase from infrahub.graphql.manager import GraphQLSchemaManager from infrahub.graphql.types import InfrahubObject @@ -258,3 +260,52 @@ async def test_generate_filters(db: InfrahubDatabase, default_branch: Branch, da "subscriber_of_groups__name__values", ] assert sorted(list(filters.keys())) == sorted(expected_filters) + + +@pytest.mark.parametrize( + "schema_changed_at_null,schema_hash_null", [(False, False), (True, False), (False, True), (True, True)] +) +async def test_branch_caching_hit( + db: InfrahubDatabase, + default_branch: Branch, + data_schema, + car_person_schema_generics, + schema_changed_at_null: bool, + schema_hash_null: bool, +): + default_branch.update_schema_hash() + same_branch = default_branch.model_copy() + if schema_changed_at_null: + same_branch.schema_changed_at = None + if schema_hash_null: + same_branch.schema_hash = None + schema_branch = registry.schema.get_schema_branch(default_branch.name) + + manager1 = GraphQLSchemaManager.get_manager_for_branch(branch=default_branch, schema_branch=schema_branch) + 
manager2 = GraphQLSchemaManager.get_manager_for_branch(branch=same_branch, schema_branch=schema_branch) + + assert manager1 is manager2 + + +@pytest.mark.parametrize("schema_changed_at_new,schema_hash_updated", [(True, False), (False, True), (True, True)]) +async def test_branch_caching_miss( + db: InfrahubDatabase, + default_branch: Branch, + data_schema, + car_person_schema_generics, + schema_changed_at_new: bool, + schema_hash_updated: bool, +): + default_branch.update_schema_hash() + same_branch = default_branch.model_copy() + schema_branch = registry.schema.get_schema_branch(default_branch.name) + if schema_changed_at_new: + same_branch.schema_changed_at = Timestamp().to_string() + if schema_hash_updated: + default_branch.schema_hash.main = "abc" + same_branch.update_schema_hash() + + manager1 = GraphQLSchemaManager.get_manager_for_branch(branch=default_branch, schema_branch=schema_branch) + manager2 = GraphQLSchemaManager.get_manager_for_branch(branch=same_branch, schema_branch=schema_branch) + + assert manager1 is not manager2 diff --git a/backend/tests/unit/graphql/test_mutation_artifact_definition.py b/backend/tests/unit/graphql/test_mutation_artifact_definition.py index f9318415fd..661ee850a9 100644 --- a/backend/tests/unit/graphql/test_mutation_artifact_definition.py +++ b/backend/tests/unit/graphql/test_mutation_artifact_definition.py @@ -1,4 +1,5 @@ from typing import Dict +from unittest.mock import call, patch import pytest from graphql import graphql @@ -8,9 +9,11 @@ from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.database import InfrahubDatabase +from infrahub.git.models import RequestArtifactDefinitionGenerate from infrahub.graphql.initialization import prepare_graphql_params -from infrahub.message_bus import messages from infrahub.services import InfrahubServices +from infrahub.services.adapters.workflow.local import WorkflowLocalExecution +from infrahub.workflows.catalogue import 
REQUEST_ARTIFACT_DEFINITION_GENERATE from tests.adapters.message_bus import BusRecorder @@ -92,29 +95,38 @@ async def test_create_artifact_definition( transformation1.id, ) recorder = BusRecorder() - service = InfrahubServices(message_bus=recorder) + service = InfrahubServices(message_bus=recorder, workflow=WorkflowLocalExecution()) gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch, service=service) - result = await graphql( - schema=gql_params.schema, - source=query, - context_value=gql_params.context, - root_value=None, - variable_values={}, - ) - assert result.errors is None - assert result.data["CoreArtifactDefinitionCreate"]["ok"] is True - ad_id = result.data["CoreArtifactDefinitionCreate"]["object"]["id"] + with patch( + "infrahub.services.adapters.workflow.local.WorkflowLocalExecution.submit_workflow" + ) as mock_submit_workflow: + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) - ad1 = await NodeManager.get_one(db=db, id=ad_id, include_owner=True, include_source=True, branch=branch) + assert result.errors is None + assert result.data["CoreArtifactDefinitionCreate"]["ok"] is True + ad_id = result.data["CoreArtifactDefinitionCreate"]["object"]["id"] - assert ad1.name.value == "Artifact 01" + ad1 = await NodeManager.get_one(db=db, id=ad_id, include_owner=True, include_source=True, branch=branch) - assert ( - messages.RequestArtifactDefinitionGenerate(artifact_definition=ad_id, branch=branch.name, limit=[]) - in service.message_bus.messages - ) + assert ad1.name.value == "Artifact 01" + + expected_calls = [ + call( + workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, + parameters={ + "model": RequestArtifactDefinitionGenerate(artifact_definition=ad1.id, branch=branch.name, limit=[]) + }, + ), + ] + mock_submit_workflow.assert_has_calls(expected_calls) async def test_update_artifact_definition( @@ -140,27 +152,37 @@ async def 
test_update_artifact_definition( """ % (definition1.id) recorder = BusRecorder() - service = InfrahubServices(message_bus=recorder) + service = InfrahubServices(message_bus=recorder, workflow=WorkflowLocalExecution()) gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch, service=service) - result = await graphql( - schema=gql_params.schema, - source=query, - context_value=gql_params.context, - root_value=None, - variable_values={}, - ) - - assert result.errors is None - assert result.data["CoreArtifactDefinitionUpdate"]["ok"] is True - - ad1_post = await NodeManager.get_one( - db=db, id=definition1.id, include_owner=True, include_source=True, branch=branch - ) - - assert ad1_post.artifact_name.value == "myartifact2" - - assert ( - messages.RequestArtifactDefinitionGenerate(artifact_definition=definition1.id, branch=branch.name, limit=[]) - in service.message_bus.messages - ) + with patch( + "infrahub.services.adapters.workflow.local.WorkflowLocalExecution.submit_workflow" + ) as mock_submit_workflow: + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["CoreArtifactDefinitionUpdate"]["ok"] is True + + ad1_post = await NodeManager.get_one( + db=db, id=definition1.id, include_owner=True, include_source=True, branch=branch + ) + + assert ad1_post.artifact_name.value == "myartifact2" + + expected_calls = [ + call( + workflow=REQUEST_ARTIFACT_DEFINITION_GENERATE, + parameters={ + "model": RequestArtifactDefinitionGenerate( + artifact_definition=definition1.id, branch=branch.name, limit=[] + ) + }, + ), + ] + mock_submit_workflow.assert_has_calls(expected_calls) diff --git a/backend/tests/unit/graphql/test_mutation_create.py b/backend/tests/unit/graphql/test_mutation_create.py index a44e7b412a..2d9860bee7 100644 --- a/backend/tests/unit/graphql/test_mutation_create.py +++ 
b/backend/tests/unit/graphql/test_mutation_create.py @@ -819,8 +819,10 @@ async def test_create_with_attribute_not_valid(db: InfrahubDatabase, default_bra async def test_create_with_uniqueness_constraint_violation(db: InfrahubDatabase, default_branch, car_person_schema): - car_schema = registry.schema.get("TestCar", branch=default_branch, duplicate=False) + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + car_schema = schema_branch.get("TestCar", duplicate=True) car_schema.uniqueness_constraints = [["owner", "color"]] + schema_branch.set(name="TestCar", schema=car_schema) p1 = await Node.init(db=db, schema="TestPerson") await p1.new(db=db, name="Bruce Wayne", height=180) diff --git a/backend/tests/unit/graphql/test_mutation_update.py b/backend/tests/unit/graphql/test_mutation_update.py index ed266fa7c2..5284621e05 100644 --- a/backend/tests/unit/graphql/test_mutation_update.py +++ b/backend/tests/unit/graphql/test_mutation_update.py @@ -8,6 +8,7 @@ from infrahub.core.node import Node from infrahub.database import InfrahubDatabase from infrahub.graphql.initialization import prepare_graphql_params +from infrahub.graphql.manager import GraphQLSchemaManager async def test_update_simple_object(db: InfrahubDatabase, person_john_main: Node, branch: Branch): @@ -85,6 +86,7 @@ async def test_update_simple_object_with_enum( enum_value, response_value, ): + GraphQLSchemaManager.clear_cache() config.SETTINGS.experimental_features.graphql_enums = graphql_enums_on query = """ mutation { diff --git a/backend/tests/unit/menu/test_generator.py b/backend/tests/unit/menu/test_generator.py index 7d18927e26..f9d8b597a3 100644 --- a/backend/tests/unit/menu/test_generator.py +++ b/backend/tests/unit/menu/test_generator.py @@ -4,7 +4,7 @@ from infrahub.core.protocols import CoreMenuItem from infrahub.core.schema import SchemaRoot from infrahub.database import InfrahubDatabase -from infrahub.menu.constants import MenuSection +from infrahub.menu.constants import 
FULL_DEFAULT_MENU, MenuSection from infrahub.menu.generator import generate_menu from infrahub.menu.models import MenuItemDefinition from infrahub.menu.utils import create_menu_children @@ -12,7 +12,7 @@ def generate_menu_fixtures(prefix: str = "Menu", depth: int = 1, nbr_item: int = 10) -> list[MenuItemDefinition]: max_depth = 3 - next_level_item: int = 5 + next_level_item: int = 3 menu: list[MenuItemDefinition] = [] @@ -33,7 +33,7 @@ def generate_menu_fixtures(prefix: str = "Menu", depth: int = 1, nbr_item: int = return menu -async def test_generate_menu( +async def test_generate_menu_placement( db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics: SchemaRoot, @@ -41,13 +41,13 @@ async def test_generate_menu( ): schema_branch = registry.schema.get_schema_branch(name=default_branch.name) - schema_electriccar = schema_branch.get(name="TestElectricCar") - schema_electriccar.menu_placement = "Builtin:ObjectManagement" - schema_branch.set(name="TestElectricCar", schema=schema_electriccar) + schema_car = schema_branch.get(name="TestCar") + schema_car.menu_placement = "BuiltinObjectManagement" + schema_branch.set(name="TestCar", schema=schema_car) await create_default_menu(db=db) - new_menu_items = generate_menu_fixtures(nbr_item=5) + new_menu_items = generate_menu_fixtures(nbr_item=2) for item in new_menu_items: obj = await item.to_node(db=db) @@ -55,10 +55,64 @@ async def test_generate_menu( if item.children: await create_menu_children(db=db, parent=obj, children=item.children) - menu_items = await registry.manager.query( - db=db, schema=CoreMenuItem, branch=default_branch, prefetch_relationships=True - ) + menu_items = await registry.manager.query(db=db, schema=CoreMenuItem, branch=default_branch) menu = await generate_menu(db=db, branch=default_branch, menu_items=menu_items) assert menu - assert "Test:Menu0" in menu.data.keys() + assert "TestMenu0" in menu.data.keys() + assert "BuiltinObjectManagement" in menu.data.keys() + assert "TestCar" in 
menu.data["BuiltinObjectManagement"].children.keys() + + +async def test_generate_menu_top_level( + db: InfrahubDatabase, + default_branch: Branch, + car_person_schema_generics: SchemaRoot, + helper, +): + await create_default_menu(db=db) + + new_menu_items = generate_menu_fixtures(nbr_item=2) + + for item in new_menu_items: + obj = await item.to_node(db=db) + await obj.save(db=db) + if item.children: + await create_menu_children(db=db, parent=obj, children=item.children) + + menu_items = await registry.manager.query(db=db, schema=CoreMenuItem, branch=default_branch) + menu = await generate_menu(db=db, branch=default_branch, menu_items=menu_items) + + assert menu + assert "TestMenu0" in menu.data.keys() + assert "TestCar" in menu.data.keys() + assert "TestCarSub" in menu.data["TestCar"].children.keys() + + +async def test_generate_menu_default( + db: InfrahubDatabase, + default_branch: Branch, + car_person_schema_generics: SchemaRoot, + helper, +): + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + schema_car = schema_branch.get(name="TestCar") + schema_car.menu_placement = "DoesNotExist" + schema_branch.set(name="TestCar", schema=schema_car) + + await create_default_menu(db=db) + + new_menu_items = generate_menu_fixtures(nbr_item=2) + + for item in new_menu_items: + obj = await item.to_node(db=db) + await obj.save(db=db) + if item.children: + await create_menu_children(db=db, parent=obj, children=item.children) + + menu_items = await registry.manager.query(db=db, schema=CoreMenuItem, branch=default_branch) + menu = await generate_menu(db=db, branch=default_branch, menu_items=menu_items) + + assert menu + assert "TestMenu0" in menu.data.keys() + assert "TestCar" in menu.data[FULL_DEFAULT_MENU].children.keys() diff --git a/backend/tests/unit/message_bus/operations/event/test_branch.py b/backend/tests/unit/message_bus/operations/event/test_branch.py index caaf4c093f..7644008caa 100644 --- 
a/backend/tests/unit/message_bus/operations/event/test_branch.py +++ b/backend/tests/unit/message_bus/operations/event/test_branch.py @@ -1,18 +1,34 @@ -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, call, patch from uuid import uuid4 +import pytest + from infrahub.core.branch import Branch from infrahub.core.diff.model.path import BranchTrackingId, EnrichedDiffRoot +from infrahub.core.diff.models import RequestDiffRefresh, RequestDiffUpdate from infrahub.core.diff.repository.repository import DiffRepository from infrahub.core.timestamp import Timestamp from infrahub.dependencies.component.registry import ComponentDependencyRegistry from infrahub.message_bus import messages from infrahub.message_bus.operations.event.branch import delete, merge, rebased -from infrahub.services import InfrahubServices +from infrahub.services import InfrahubServices, services from infrahub.services.adapters.workflow.local import WorkflowLocalExecution +from infrahub.workflows.catalogue import REQUEST_DIFF_REFRESH, REQUEST_DIFF_UPDATE, TRIGGER_ARTIFACT_DEFINITION_GENERATE from tests.adapters.message_bus import BusRecorder +@pytest.fixture +def init_service(): + original = services.service + recorder = BusRecorder() + database = MagicMock() + workflow = WorkflowLocalExecution() + service = InfrahubServices(message_bus=recorder, database=database, workflow=workflow) + services.service = service + yield service + services.service = original + + async def test_delete(prefect_test_fixture): """Validate that a deleted branch triggers a registry refresh and cancels open proposed changes""" @@ -32,18 +48,20 @@ async def test_delete(prefect_test_fixture): assert trigger_cancel.branch == "cr1234" -async def test_merged(default_branch: Branch, prefect_test_fixture): +async def test_merged(default_branch: Branch, init_service: InfrahubServices, prefect_test_fixture): + """ + Test that merge flow triggers corrects events/workflows. 
It does not actually test these events/workflows behaviors + as they are mocked. + """ + source_branch_name = "cr1234" target_branch_name = "main" right_now = Timestamp() message = messages.EventBranchMerge( source_branch=source_branch_name, target_branch=target_branch_name, ipam_node_details=[] ) + service = init_service - recorder = BusRecorder() - database = MagicMock() - workflow = WorkflowLocalExecution() - service = InfrahubServices(message_bus=recorder, database=database, workflow=workflow) tracked_diff_roots = [ EnrichedDiffRoot( base_branch_name=target_branch_name, @@ -73,17 +91,36 @@ async def test_merged(default_branch: Branch, prefect_test_fixture): mock_get_component_registry = MagicMock(return_value=mock_component_registry) mock_component_registry.get_component.return_value = diff_repo - with patch("infrahub.message_bus.operations.event.branch.get_component_registry", new=mock_get_component_registry): + with ( + patch("infrahub.message_bus.operations.event.branch.get_component_registry", new=mock_get_component_registry), + patch( + "infrahub.services.adapters.workflow.local.WorkflowLocalExecution.submit_workflow" + ) as mock_submit_workflow, + ): await merge(message=message, service=service) - mock_component_registry.get_component.assert_awaited_once_with(DiffRepository, db=database, branch=default_branch) + expected_calls = [ + call(workflow=TRIGGER_ARTIFACT_DEFINITION_GENERATE, parameters={"branch": message.target_branch}), + call( + workflow=REQUEST_DIFF_UPDATE, + parameters={"model": RequestDiffUpdate(branch_name=tracked_diff_roots[0].diff_branch_name)}, + ), + call( + workflow=REQUEST_DIFF_UPDATE, + parameters={"model": RequestDiffUpdate(branch_name=tracked_diff_roots[1].diff_branch_name)}, + ), + ] + mock_submit_workflow.assert_has_calls(expected_calls) + assert mock_submit_workflow.call_count == len(expected_calls) + + mock_component_registry.get_component.assert_awaited_once_with( + DiffRepository, db=service.database, branch=default_branch + 
) diff_repo.get_empty_roots.assert_awaited_once_with(base_branch_names=[target_branch_name]) - assert len(recorder.messages) == 5 - assert recorder.messages[0] == messages.RefreshRegistryBranches() - assert recorder.messages[1] == messages.TriggerArtifactDefinitionGenerate(branch=target_branch_name) - assert recorder.messages[2] == messages.TriggerGeneratorDefinitionRun(branch=target_branch_name) - assert recorder.messages[3] == messages.RequestDiffUpdate(branch_name=tracked_diff_roots[0].diff_branch_name) - assert recorder.messages[4] == messages.RequestDiffUpdate(branch_name=tracked_diff_roots[1].diff_branch_name) + + assert len(service.message_bus.messages) == 2 + assert service.message_bus.messages[0] == messages.RefreshRegistryBranches() + assert service.message_bus.messages[1] == messages.TriggerGeneratorDefinitionRun(branch=target_branch_name) async def test_rebased(default_branch: Branch, prefect_test_fixture): @@ -94,7 +131,7 @@ async def test_rebased(default_branch: Branch, prefect_test_fixture): recorder = BusRecorder() database = MagicMock() - service = InfrahubServices(message_bus=recorder, database=database) + service = InfrahubServices(message_bus=recorder, database=database, workflow=WorkflowLocalExecution()) diff_roots = [ EnrichedDiffRoot( base_branch_name="main", @@ -112,14 +149,30 @@ async def test_rebased(default_branch: Branch, prefect_test_fixture): mock_get_component_registry = MagicMock(return_value=mock_component_registry) mock_component_registry.get_component.return_value = diff_repo - with patch("infrahub.message_bus.operations.event.branch.get_component_registry", new=mock_get_component_registry): + with ( + patch("infrahub.message_bus.operations.event.branch.get_component_registry", new=mock_get_component_registry), + patch( + "infrahub.services.adapters.workflow.local.WorkflowLocalExecution.submit_workflow" + ) as mock_submit_workflow, + ): await rebased(message=message, service=service) + expected_calls = [ + call( + 
workflow=REQUEST_DIFF_REFRESH, + parameters={"model": RequestDiffRefresh(branch_name=branch_name, diff_id=diff_roots[0].uuid)}, + ), + call( + workflow=REQUEST_DIFF_REFRESH, + parameters={"model": RequestDiffRefresh(branch_name=branch_name, diff_id=diff_roots[1].uuid)}, + ), + ] + mock_submit_workflow.assert_has_calls(expected_calls) + assert mock_submit_workflow.call_count == len(expected_calls) + mock_component_registry.get_component.assert_awaited_once_with(DiffRepository, db=database, branch=default_branch) diff_repo.get_empty_roots.assert_awaited_once_with(diff_branch_names=[branch_name]) - assert len(recorder.messages) == 3 + assert len(recorder.messages) == 1 assert isinstance(recorder.messages[0], messages.RefreshRegistryRebasedBranch) refresh_message: messages.RefreshRegistryRebasedBranch = recorder.messages[0] assert refresh_message.branch == "cr1234" - assert recorder.messages[1] == messages.RequestDiffRefresh(branch_name=branch_name, diff_id=diff_roots[0].uuid) - assert recorder.messages[2] == messages.RequestDiffRefresh(branch_name=branch_name, diff_id=diff_roots[1].uuid) diff --git a/backend/tests/unit/message_bus/operations/git/test_branch.py b/backend/tests/unit/message_bus/operations/git/test_branch.py deleted file mode 100644 index 9fa7e5974f..0000000000 --- a/backend/tests/unit/message_bus/operations/git/test_branch.py +++ /dev/null @@ -1,26 +0,0 @@ -from infrahub_sdk import InfrahubClient - -from infrahub.git import InfrahubRepository -from infrahub.message_bus import messages -from infrahub.services import InfrahubServices - - -async def test_branch_create(git_fixture_repo: InfrahubRepository, helper): - repo = git_fixture_repo.get_git_repo_main() - original_branches = [ref.name for ref in repo.refs if not ref.name.startswith("origin/")] - message = messages.GitBranchCreate( - repository_id=str(git_fixture_repo.id), - repository_name=git_fixture_repo.name, - branch="new-branch", - branch_id="69a92981-1694-4b1e-84ad-78ee4de0fe26", - ) - - 
bus_simulator = helper.get_message_bus_simulator() - service = InfrahubServices(client=InfrahubClient(), message_bus=bus_simulator) - bus_simulator.service = service - - await service.send(message=message) - - branches = [ref.name for ref in repo.refs if not ref.name.startswith("origin/")] - assert original_branches == ["main"] - assert branches == sorted(["main", "new-branch"]) diff --git a/backend/tests/unit/permissions/test_backends.py b/backend/tests/unit/permissions/test_backends.py index 826fb375e0..e152d86b21 100644 --- a/backend/tests/unit/permissions/test_backends.py +++ b/backend/tests/unit/permissions/test_backends.py @@ -1,17 +1,19 @@ +from infrahub.auth import AccountSession from infrahub.core.account import GlobalPermission, ObjectPermission from infrahub.core.branch import Branch from infrahub.core.constants import GlobalPermissions, InfrahubKind, PermissionAction, PermissionDecision from infrahub.core.node import Node -from infrahub.core.protocols import CoreAccount from infrahub.database import InfrahubDatabase from infrahub.permissions import LocalPermissionBackend from infrahub.permissions.constants import PermissionDecisionFlag -async def test_load_permissions(db: InfrahubDatabase, default_branch: Branch, create_test_admin, first_account): +async def test_load_permissions( + db: InfrahubDatabase, default_branch: Branch, session_admin: AccountSession, session_first_account: AccountSession +): backend = LocalPermissionBackend() - permissions = await backend.load_permissions(db=db, account_id=create_test_admin.id, branch=default_branch) + permissions = await backend.load_permissions(db=db, account_session=session_admin, branch=default_branch) assert "global_permissions" in permissions assert permissions["global_permissions"][0].action == GlobalPermissions.SUPER_ADMIN.value @@ -19,15 +21,11 @@ async def test_load_permissions(db: InfrahubDatabase, default_branch: Branch, cr assert "object_permissions" in permissions assert 
str(permissions["object_permissions"][0]) == str( ObjectPermission( - id="", - namespace="*", - name="*", - action=PermissionAction.ANY.value, - decision=PermissionDecision.ALLOW_ALL.value, + namespace="*", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.ALLOW_ALL.value ) ) - permissions = await backend.load_permissions(db=db, account_id=first_account.id, branch=default_branch) + permissions = await backend.load_permissions(db=db, account_session=session_first_account, branch=default_branch) assert "global_permissions" in permissions assert not permissions["global_permissions"] @@ -40,27 +38,19 @@ async def test_has_permission_global( db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None, - create_test_admin: CoreAccount, - first_account: CoreAccount, - second_account: CoreAccount, + session_admin: AccountSession, + session_first_account: AccountSession, + session_second_account: AccountSession, ): backend = LocalPermissionBackend() allow_default_branch_edition = GlobalPermission( - id="", - action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, - decision=PermissionDecision.ALLOW_ALL.value, - name="Edit default branch", + action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, decision=PermissionDecision.ALLOW_ALL.value ) role1_permissions = [] obj = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) - await obj.new( - db=db, - name=allow_default_branch_edition.name, - action=allow_default_branch_edition.action, - decision=allow_default_branch_edition.decision, - ) + await obj.new(db=db, action=allow_default_branch_edition.action, decision=allow_default_branch_edition.decision) await obj.save(db=db) role1_permissions.append(obj) @@ -72,21 +62,16 @@ async def test_has_permission_global( await group1.new(db=db, name="group1", roles=[role1]) await group1.save(db=db) - await group1.members.add(db=db, data={"id": first_account.id}) + await group1.members.add(db=db, data={"id": session_first_account.account_id}) 
await group1.members.save(db=db) role2_permissions = [] for p in [ allow_default_branch_edition, - GlobalPermission( - id="", - action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, - decision=PermissionDecision.DENY.value, - name="Edit default branch", - ), + GlobalPermission(action=GlobalPermissions.EDIT_DEFAULT_BRANCH.value, decision=PermissionDecision.DENY.value), ]: obj = await Node.init(db=db, schema=InfrahubKind.GLOBALPERMISSION) - await obj.new(db=db, name=p.name, action=p.action, decision=p.decision) + await obj.new(db=db, action=p.action, decision=p.decision) await obj.save(db=db) role2_permissions.append(obj) @@ -98,14 +83,14 @@ async def test_has_permission_global( await group2.new(db=db, name="group2", roles=[role2]) await group2.save(db=db) - await group2.members.add(db=db, data={"id": second_account.id}) + await group2.members.add(db=db, data={"id": session_second_account.account_id}) await group2.members.save(db=db) assert await backend.has_permission( - db=db, account_id=first_account.id, permission=allow_default_branch_edition, branch=default_branch + db=db, account_session=session_first_account, permission=allow_default_branch_edition, branch=default_branch ) assert not await backend.has_permission( - db=db, account_id=second_account.id, permission=allow_default_branch_edition, branch=default_branch + db=db, account_session=session_second_account, permission=allow_default_branch_edition, branch=default_branch ) @@ -113,27 +98,19 @@ async def test_has_permission_object( db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None, - create_test_admin: CoreAccount, - first_account: CoreAccount, - second_account: CoreAccount, + session_admin: AccountSession, + session_first_account: AccountSession, + session_second_account: AccountSession, ): backend = LocalPermissionBackend() role1_permissions = [] for p in [ ObjectPermission( - id="", - namespace="*", - name="*", - action=PermissionAction.ANY.value, - 
decision=PermissionDecision.ALLOW_ALL.value, + namespace="*", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.ALLOW_ALL.value ), ObjectPermission( - id="", - namespace="Builtin", - name="Tag", - action=PermissionAction.ANY.value, - decision=PermissionDecision.DENY.value, + namespace="Builtin", name="Tag", action=PermissionAction.ANY.value, decision=PermissionDecision.DENY.value ), ]: obj = await Node.init(db=db, schema=InfrahubKind.OBJECTPERMISSION) @@ -149,20 +126,15 @@ async def test_has_permission_object( await group1.new(db=db, name="group1", roles=[role1]) await group1.save(db=db) - await group1.members.add(db=db, data={"id": first_account.id}) + await group1.members.add(db=db, data={"id": session_first_account.account_id}) await group1.members.save(db=db) role2_permissions = [] for p in [ ObjectPermission( - id="", - namespace="*", - name="*", - action=PermissionAction.ANY.value, - decision=PermissionDecision.DENY.value, + namespace="*", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.DENY.value ), ObjectPermission( - id="", namespace="Builtin", name="Tag", action=PermissionAction.ANY.value, @@ -182,21 +154,20 @@ async def test_has_permission_object( await group2.new(db=db, name="group2", roles=[role2]) await group2.save(db=db) - await group2.members.add(db=db, data={"id": second_account.id}) + await group2.members.add(db=db, data={"id": session_second_account.account_id}) await group2.members.save(db=db) permission = ObjectPermission( - id="", namespace="Builtin", name="Tag", action=PermissionAction.CREATE.value, decision=PermissionDecision.ALLOW_ALL.value, ) assert not await backend.has_permission( - db=db, account_id=first_account.id, permission=permission, branch=default_branch + db=db, account_session=session_first_account, permission=permission, branch=default_branch ) assert await backend.has_permission( - db=db, account_id=second_account.id, permission=permission, branch=default_branch + db=db, 
account_session=session_second_account, permission=permission, branch=default_branch ) @@ -204,27 +175,19 @@ async def test_report_permission_object( db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None, - create_test_admin: CoreAccount, - first_account: CoreAccount, - second_account: CoreAccount, + session_admin: AccountSession, + session_first_account: AccountSession, + session_second_account: AccountSession, ): backend = LocalPermissionBackend() role1_permissions = [] for p in [ ObjectPermission( - id="", - namespace="*", - name="*", - action=PermissionAction.ANY.value, - decision=PermissionDecision.ALLOW_ALL.value, + namespace="*", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.ALLOW_ALL.value ), ObjectPermission( - id="", - namespace="Builtin", - name="Tag", - action=PermissionAction.ANY.value, - decision=PermissionDecision.DENY.value, + namespace="Builtin", name="Tag", action=PermissionAction.ANY.value, decision=PermissionDecision.DENY.value ), ]: obj = await Node.init(db=db, schema=InfrahubKind.OBJECTPERMISSION) @@ -240,20 +203,15 @@ async def test_report_permission_object( await group1.new(db=db, name="group1", roles=[role1]) await group1.save(db=db) - await group1.members.add(db=db, data={"id": first_account.id}) + await group1.members.add(db=db, data={"id": session_first_account.account_id}) await group1.members.save(db=db) role2_permissions = [] for p in [ ObjectPermission( - id="", - namespace="*", - name="*", - action=PermissionAction.ANY.value, - decision=PermissionDecision.DENY.value, + namespace="*", name="*", action=PermissionAction.ANY.value, decision=PermissionDecision.DENY.value ), ObjectPermission( - id="", namespace="Builtin", name="Tag", action=PermissionAction.ANY.value, @@ -273,10 +231,12 @@ async def test_report_permission_object( await group2.new(db=db, name="group2", roles=[role2]) await group2.save(db=db) - await group2.members.add(db=db, data={"id": second_account.id}) + await 
group2.members.add(db=db, data={"id": session_second_account.account_id}) await group2.members.save(db=db) - first_permissions = await backend.load_permissions(db=db, account_id=first_account.id, branch=default_branch) + first_permissions = await backend.load_permissions( + db=db, account_session=session_first_account, branch=default_branch + ) assert ( backend.report_object_permission( @@ -291,7 +251,9 @@ async def test_report_permission_object( == PermissionDecisionFlag.ALLOW_ALL ) - second_permissions = await backend.load_permissions(db=db, account_id=second_account.id, branch=default_branch) + second_permissions = await backend.load_permissions( + db=db, account_session=session_second_account, branch=default_branch + ) assert ( backend.report_object_permission( diff --git a/changelog/+compose-ansi.fixed.md b/changelog/+compose-ansi.fixed.md deleted file mode 100644 index bdbe0f923a..0000000000 --- a/changelog/+compose-ansi.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixes an issue where docker compose would output ANSI control characters that don't support it diff --git a/changelog/+d25b6331.deprecated.md b/changelog/+d25b6331.deprecated.md deleted file mode 100644 index 59b45f18c8..0000000000 --- a/changelog/+d25b6331.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Marked CoreAccount.role as deprecated. Due to the new permissions framework the account roles "admin" / "read-only" / "read-write" are deprecated and will be removed in Infrahub 1.1. 
diff --git a/changelog/+discord-link.added.md b/changelog/+discord-link.added.md deleted file mode 100644 index c3a78f016d..0000000000 --- a/changelog/+discord-link.added.md +++ /dev/null @@ -1 +0,0 @@ -Added link to our Discord server in the account menu diff --git a/changelog/+docs-docker.fixed.md b/changelog/+docs-docker.fixed.md deleted file mode 100644 index 33d66ba293..0000000000 --- a/changelog/+docs-docker.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Prevent temporary directories generated by Docusaurus to be imported by Docker \ No newline at end of file diff --git a/changelog/+f4bb80ab.added.md b/changelog/+f4bb80ab.added.md deleted file mode 100644 index 9c0e3e538a..0000000000 --- a/changelog/+f4bb80ab.added.md +++ /dev/null @@ -1,3 +0,0 @@ -Added permissions framework for global and object kind level permissions - -In this first iteration the object permissions are applied to nodes as a whole, in upcoming versions it will be possible to define attribute level permissions as well. diff --git a/changelog/+metric-none.fixed.md b/changelog/+metric-none.fixed.md deleted file mode 100644 index 31eb15687a..0000000000 --- a/changelog/+metric-none.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix metric missing the query name in Prometheus data \ No newline at end of file diff --git a/changelog/+permissions.added.md b/changelog/+permissions.added.md deleted file mode 100644 index 435a7b9e64..0000000000 --- a/changelog/+permissions.added.md +++ /dev/null @@ -1,4 +0,0 @@ -New permissions system in UI: -- Implemented CRUD views for managing accounts, groups, roles, and permissions -- Updated all components to support new permission system -- Added dynamic message display according to user access levels diff --git a/changelog/+reo_dot_dev.added.md b/changelog/+reo_dot_dev.added.md deleted file mode 100644 index 5f679fca05..0000000000 --- a/changelog/+reo_dot_dev.added.md +++ /dev/null @@ -1 +0,0 @@ -Added .js for Reo.dev analytics integration. 
diff --git a/changelog/+sdk-timeout.fixed.md b/changelog/+sdk-timeout.fixed.md deleted file mode 100644 index 59a37563d0..0000000000 --- a/changelog/+sdk-timeout.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Changed the Python SDK connection timeout to 60s. diff --git a/changelog/1075.fixed.md b/changelog/1075.fixed.md deleted file mode 100644 index 02ee48e323..0000000000 --- a/changelog/1075.fixed.md +++ /dev/null @@ -1 +0,0 @@ -The `infrahub-git` agent service has been renamed to `task-worker` in docker compose and the command to start it has been updated as well \ No newline at end of file diff --git a/changelog/1568.added.md b/changelog/1568.added.md deleted file mode 100644 index a179468657..0000000000 --- a/changelog/1568.added.md +++ /dev/null @@ -1 +0,0 @@ -Add support to signin with OAuth2 and Open ID Connect (OIDC) diff --git a/changelog/3302.added.md b/changelog/3302.added.md deleted file mode 100644 index e7d414c329..0000000000 --- a/changelog/3302.added.md +++ /dev/null @@ -1 +0,0 @@ -Add internal HTTP adapter to allow for generic access from Infrahub diff --git a/changelog/3435.fixed.md b/changelog/3435.fixed.md deleted file mode 100644 index d91c9c7382..0000000000 --- a/changelog/3435.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Add ability to import repositories with default branch other than 'main'. 
diff --git a/changelog/3495.fixed.md b/changelog/3495.fixed.md deleted file mode 100644 index 4bcba7c5bd..0000000000 --- a/changelog/3495.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Disable approve/merge/close buttons for merged Proposed Changes diff --git a/changelog/3884.removed.md b/changelog/3884.removed.md deleted file mode 100644 index 17a2ca06c4..0000000000 --- a/changelog/3884.removed.md +++ /dev/null @@ -1 +0,0 @@ -Remove previously deprecated GET api endpoint "/api/schema/" diff --git a/changelog/3908.added.md b/changelog/3908.added.md deleted file mode 100644 index 8612bf2e8d..0000000000 --- a/changelog/3908.added.md +++ /dev/null @@ -1 +0,0 @@ -Add support to search a node by human friendly ID within a GraphQL query \ No newline at end of file diff --git a/changelog/4062.fixed.md b/changelog/4062.fixed.md deleted file mode 100644 index dfbbe0e503..0000000000 --- a/changelog/4062.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Allow users to run artifacts and generators on nodes without name attribute \ No newline at end of file diff --git a/changelog/4315.fixed.md b/changelog/4315.fixed.md deleted file mode 100644 index 9073458e21..0000000000 --- a/changelog/4315.fixed.md +++ /dev/null @@ -1 +0,0 @@ -"Retry All" button for checks is bigger diff --git a/changelog/4432.fixed.md b/changelog/4432.fixed.md deleted file mode 100644 index 48cfdeaf36..0000000000 --- a/changelog/4432.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Add a size restriction on common attribute kinds. 
Only TextArea and JSON support large values diff --git a/changelog/4482.fixed.md b/changelog/4482.fixed.md deleted file mode 100644 index f6ddd2e531..0000000000 --- a/changelog/4482.fixed.md +++ /dev/null @@ -1 +0,0 @@ -The HFID of a related node is properly returned via GraphQL in all scenarios \ No newline at end of file diff --git a/development/docker-compose.yml b/development/docker-compose.yml index 6963ca7793..624f8d4f23 100644 --- a/development/docker-compose.yml +++ b/development/docker-compose.yml @@ -127,9 +127,9 @@ services: INFRAHUB_SECURITY_SECRET_KEY: 327f747f-efac-42be-9e73-999f08f86b92 INFRAHUB_BROKER_ADDRESS: message-queue INFRAHUB_CACHE_ADDRESS: "${INFRAHUB_CACHE_ADDRESS:-cache}" - INFRAHUB_WORKFLOW_ADDRESS: task-manager - INFRAHUB_WORKFLOW_PORT: 4200 - PREFECT_API_URL: "http://task-manager:4200/api" + INFRAHUB_WORKFLOW_ADDRESS: "${INFRAHUB_WORKFLOW_ADDRESS:-task-manager}" + INFRAHUB_WORKFLOW_PORT: "${INFRAHUB_WORKFLOW_PORT:-4200}" + PREFECT_API_URL: http://${INFRAHUB_WORKFLOW_ADDRESS:-task-manager}:${INFRAHUB_WORKFLOW_PORT:-4200}/api INFRAHUB_DB_ADDRESS: database INFRAHUB_DB_USERNAME: neo4j INFRAHUB_DB_PASSWORD: admin @@ -138,7 +138,6 @@ services: INFRAHUB_STORAGE_DRIVER: local volumes: - "storage_data:/opt/infrahub/storage" - - "workflow_data:/opt/infrahub/workflow" tty: true labels: com.github.run_id: "${GITHUB_RUN_ID:-unknown}" @@ -188,7 +187,6 @@ services: labels: com.github.run_id: "${GITHUB_RUN_ID:-unknown}" com.github.job: "${JOB_NAME:-unknown}" - task-worker: profiles: [dev] deploy: @@ -217,7 +215,7 @@ services: INFRAHUB_CACHE_ADDRESS: "${INFRAHUB_CACHE_ADDRESS:-cache}" INFRAHUB_WORKFLOW_ADDRESS: "${INFRAHUB_WORKFLOW_ADDRESS:-task-manager}" INFRAHUB_WORKFLOW_PORT: "${INFRAHUB_WORKFLOW_PORT:-4200}" - PREFECT_API_URL: "http://task-manager:4200/api" + PREFECT_API_URL: http://${INFRAHUB_WORKFLOW_ADDRESS:-task-manager}:${INFRAHUB_WORKFLOW_PORT:-4200}/api INFRAHUB_DB_USERNAME: neo4j INFRAHUB_DB_PASSWORD: admin INFRAHUB_DB_PORT: 7687 @@ -235,4 
+233,3 @@ volumes: git_data: git_remote_data: storage_data: - workflow_data: diff --git a/docker-compose.yml b/docker-compose.yml index 926538cd68..4e015084b2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,6 +18,7 @@ x-infrahub-config: &infrahub_config INFRAHUB_ANALYTICS_ADDRESS: INFRAHUB_ANALYTICS_API_KEY: INFRAHUB_ANALYTICS_ENABLE: ${INFRAHUB_ANALYTICS_ENABLE:-true} + INFRAHUB_ANONYMOUS_ACCESS_ROLE: ${INFRAHUB_ANONYMOUS_ACCESS_ROLE:-Anonymous User} INFRAHUB_API_CORS_ALLOW_CREDENTIALS: ${INFRAHUB_API_CORS_ALLOW_CREDENTIALS:-true} INFRAHUB_API_CORS_ALLOW_HEADERS: INFRAHUB_API_CORS_ALLOW_METHODS: @@ -30,6 +31,7 @@ x-infrahub-config: &infrahub_config INFRAHUB_BROKER_NAMESPACE: ${INFRAHUB_BROKER_NAMESPACE:-infrahub} INFRAHUB_BROKER_PASSWORD: &broker_password ${INFRAHUB_BROKER_PASSWORD:-infrahub} INFRAHUB_BROKER_PORT: + INFRAHUB_BROKER_RABBITMQ_HTTP_PORT: INFRAHUB_BROKER_TLS_CA_FILE: INFRAHUB_BROKER_TLS_ENABLED: ${INFRAHUB_BROKER_TLS_ENABLED:-false} INFRAHUB_BROKER_TLS_INSECURE: ${INFRAHUB_BROKER_TLS_INSECURE:-false} @@ -39,12 +41,12 @@ x-infrahub-config: &infrahub_config INFRAHUB_CACHE_DATABASE: ${INFRAHUB_CACHE_DATABASE:-0} INFRAHUB_CACHE_DRIVER: ${INFRAHUB_CACHE_DRIVER:-redis} INFRAHUB_CACHE_ENABLE: ${INFRAHUB_CACHE_ENABLE:-true} - INFRAHUB_CACHE_PASSWORD: ${INFRAHUB_CACHE_PASSWORD:-infrahub} + INFRAHUB_CACHE_PASSWORD: &cache_password ${INFRAHUB_CACHE_PASSWORD:-} INFRAHUB_CACHE_PORT: INFRAHUB_CACHE_TLS_CA_FILE: INFRAHUB_CACHE_TLS_ENABLED: ${INFRAHUB_CACHE_TLS_ENABLED:-false} INFRAHUB_CACHE_TLS_INSECURE: ${INFRAHUB_CACHE_TLS_INSECURE:-false} - INFRAHUB_CACHE_USERNAME: ${INFRAHUB_CACHE_USERNAME:-infrahub} + INFRAHUB_CACHE_USERNAME: &cache_username ${INFRAHUB_CACHE_USERNAME:-} INFRAHUB_CONFIG: INFRAHUB_DB_ADDRESS: ${INFRAHUB_DB_ADDRESS:-localhost} INFRAHUB_DB_DATABASE: @@ -64,6 +66,9 @@ x-infrahub-config: &infrahub_config INFRAHUB_EXPERIMENTAL_PULL_REQUEST: ${INFRAHUB_EXPERIMENTAL_PULL_REQUEST:-false} INFRAHUB_GIT_REPOSITORIES_DIRECTORY: 
${INFRAHUB_GIT_REPOSITORIES_DIRECTORY:-repositories} INFRAHUB_GIT_SYNC_INTERVAL: ${INFRAHUB_GIT_SYNC_INTERVAL:-10} + INFRAHUB_HTTP_TIMEOUT: ${INFRAHUB_HTTP_TIMEOUT:-10} + INFRAHUB_HTTP_TLS_CA_BUNDLE: + INFRAHUB_HTTP_TLS_INSECURE: ${INFRAHUB_HTTP_TLS_INSECURE:-false} INFRAHUB_INITIAL_ADMIN_PASSWORD: ${INFRAHUB_INITIAL_ADMIN_PASSWORD:-infrahub} INFRAHUB_INITIAL_ADMIN_TOKEN: INFRAHUB_INITIAL_AGENT_PASSWORD: @@ -79,8 +84,13 @@ x-infrahub-config: &infrahub_config INFRAHUB_MISC_PRINT_QUERY_DETAILS: ${INFRAHUB_MISC_PRINT_QUERY_DETAILS:-false} INFRAHUB_MISC_RESPONSE_DELAY: ${INFRAHUB_MISC_RESPONSE_DELAY:-0} INFRAHUB_MISC_START_BACKGROUND_RUNNER: ${INFRAHUB_MISC_START_BACKGROUND_RUNNER:-true} + INFRAHUB_PERMISSION_BACKENDS: ${INFRAHUB_PERMISSION_BACKENDS:-["infrahub.permissions.LocalPermissionBackend"]} INFRAHUB_PRODUCTION: INFRAHUB_SECURITY_ACCESS_TOKEN_LIFETIME: ${INFRAHUB_SECURITY_ACCESS_TOKEN_LIFETIME:-3600} + INFRAHUB_SECURITY_OAUTH2_PROVIDERS: + INFRAHUB_SECURITY_OAUTH2_PROVIDER_SETTINGS: + INFRAHUB_SECURITY_OIDC_PROVIDERS: + INFRAHUB_SECURITY_OIDC_PROVIDER_SETTINGS: INFRAHUB_SECURITY_REFRESH_TOKEN_LIFETIME: ${INFRAHUB_SECURITY_REFRESH_TOKEN_LIFETIME:-2592000} INFRAHUB_SECURITY_SECRET_KEY: INFRAHUB_STORAGE_BUCKET_NAME: @@ -92,7 +102,7 @@ x-infrahub-config: &infrahub_config INFRAHUB_STORAGE_QUERYSTRING_AUTH: ${INFRAHUB_STORAGE_QUERYSTRING_AUTH:-false} INFRAHUB_STORAGE_USE_SSL: ${INFRAHUB_STORAGE_USE_SSL:-true} INFRAHUB_TELEMETRY_ENDPOINT: ${INFRAHUB_TELEMETRY_ENDPOINT:-https://telemetry.opsmill.cloud/infrahub} - INFRAHUB_TELEMETRY_INTERVAL: ${INFRAHUB_TELEMETRY_INTERVAL:-86400} + INFRAHUB_TELEMETRY_INTERVAL: INFRAHUB_TELEMETRY_OPTOUT: ${INFRAHUB_TELEMETRY_OPTOUT:-false} INFRAHUB_TIMEOUT: INFRAHUB_TRACE_ENABLE: ${INFRAHUB_TRACE_ENABLE:-false} @@ -100,8 +110,15 @@ x-infrahub-config: &infrahub_config INFRAHUB_TRACE_EXPORTER_PROTOCOL: ${INFRAHUB_TRACE_EXPORTER_PROTOCOL:-grpc} INFRAHUB_TRACE_EXPORTER_TYPE: ${INFRAHUB_TRACE_EXPORTER_TYPE:-console} INFRAHUB_TRACE_INSECURE: 
${INFRAHUB_TRACE_INSECURE:-true} + INFRAHUB_WORKFLOW_ADDRESS: ${INFRAHUB_WORKFLOW_ADDRESS:-localhost} + INFRAHUB_WORKFLOW_DRIVER: ${INFRAHUB_WORKFLOW_DRIVER:-worker} + INFRAHUB_WORKFLOW_ENABLE: ${INFRAHUB_WORKFLOW_ENABLE:-true} + INFRAHUB_WORKFLOW_PORT: + INFRAHUB_WORKFLOW_TLS_ENABLED: ${INFRAHUB_WORKFLOW_TLS_ENABLED:-false} + INFRAHUB_WORKFLOW_WORKER_POLLING_INTERVAL: ${INFRAHUB_WORKFLOW_WORKER_POLLING_INTERVAL:-2} OTEL_RESOURCE_ATTRIBUTES: + services: message-queue: image: ${MESSAGE_QUEUE_DOCKER_IMAGE:-rabbitmq:3.13.7-management} @@ -113,7 +130,8 @@ services: test: rabbitmq-diagnostics -q check_port_connectivity interval: 5s timeout: 30s - retries: 3 + retries: 10 + start_period: 3s ports: - 15692:15692 @@ -130,7 +148,7 @@ services: image: ${NEO4J_DOCKER_IMAGE:-neo4j:5.20.0-community} restart: unless-stopped environment: - NEO4J_AUTH: neo4j/${INFRAHUB_DB_PASSWORD:-admin} + NEO4J_AUTH: ${INFRAHUB_DB_USERNAME:-neo4j}/${INFRAHUB_DB_PASSWORD:-admin} NEO4J_dbms_security_procedures_unrestricted: "apoc.*" NEO4J_dbms_security_auth__minimum__password__length: 4 volumes: @@ -150,18 +168,16 @@ services: image: "${TASK_MANAGER_DOCKER_IMAGE:-prefecthq/prefect:3.0.3-python3.12}" command: prefect server start --host 0.0.0.0 --ui restart: unless-stopped + depends_on: + - task-manager-db environment: PREFECT_API_DATABASE_CONNECTION_URL: postgresql+asyncpg://postgres:postgres@task-manager-db:5432/prefect - PREFECT_WORKER_QUERY_SECONDS: 3 - PREFECT_AGENT_QUERY_INTERVAL: 3 healthcheck: test: /usr/local/bin/httpx http://localhost:4200/api/health || exit 1 interval: 5s timeout: 5s retries: 20 start_period: 10s - depends_on: - - task-manager-db task-manager-db: image: postgres:16-alpine @@ -183,6 +199,7 @@ services: restart: unless-stopped command: > gunicorn --config backend/infrahub/serve/gunicorn_config.py + -w ${WEB_CONCURRENCY:-4} --logger-class infrahub.serve.log.GunicornLogger infrahub.server:app depends_on: @@ -192,6 +209,8 @@ services: condition: service_healthy cache: 
condition: service_healthy + task-manager: + condition: service_healthy environment: <<: *infrahub_config INFRAHUB_PRODUCTION: ${INFRAHUB_PRODUCTION:-false} @@ -199,13 +218,17 @@ services: INFRAHUB_BROKER_ADDRESS: ${INFRAHUB_BROKER_ADDRESS:-message-queue} INFRAHUB_CACHE_ADDRESS: ${INFRAHUB_CACHE_ADDRESS:-cache} INFRAHUB_DB_ADDRESS: ${INFRAHUB_DB_ADDRESS:-database} + INFRAHUB_WORKFLOW_ADDRESS: ${INFRAHUB_WORKFLOW_ADDRESS:-task-manager} INFRAHUB_INITIAL_ADMIN_TOKEN: ${INFRAHUB_INITIAL_ADMIN_TOKEN:-06438eb2-8019-4776-878c-0941b1f1d1ec} INFRAHUB_INITIAL_AGENT_TOKEN: ${INFRAHUB_INITIAL_AGENT_TOKEN:-44af444d-3b26-410d-9546-b758657e026c} INFRAHUB_SECURITY_SECRET_KEY: ${INFRAHUB_SECURITY_SECRET_KEY:-327f747f-efac-42be-9e73-999f08f86b92"} + INFRAHUB_WORKFLOW_PORT: ${INFRAHUB_WORKFLOW_PORT:-4200} + PREFECT_API_URL: http://${INFRAHUB_WORKFLOW_ADDRESS:-task-manager}:${INFRAHUB_WORKFLOW_PORT:-4200}/api ports: - 8000:8000 volumes: - "storage_data:${INFRAHUB_STORAGE_LOCAL_PATH:-/opt/infrahub/storage}" + - "workflow_data:/opt/infrahub/workflow" tty: true healthcheck: test: curl -s -f -o /dev/null http://localhost:8000/api/schema/summary || exit 1 @@ -214,12 +237,12 @@ services: retries: 20 start_period: 10s - infrahub-git: + task-worker: deploy: mode: replicated replicas: 2 image: "${INFRAHUB_DOCKER_IMAGE:-registry.opsmill.io/opsmill/infrahub}:${VERSION:-0.16.2}" - command: infrahub git-agent start --debug + command: prefect worker start --type infrahubasync --pool infrahub-worker --with-healthcheck restart: unless-stopped depends_on: - infrahub-server @@ -234,7 +257,10 @@ services: INFRAHUB_BROKER_ADDRESS: ${INFRAHUB_BROKER_ADDRESS:-message-queue} INFRAHUB_CACHE_ADDRESS: ${INFRAHUB_CACHE_ADDRESS:-cache} INFRAHUB_DB_ADDRESS: ${INFRAHUB_DB_ADDRESS:-database} + INFRAHUB_WORKFLOW_ADDRESS: ${INFRAHUB_WORKFLOW_ADDRESS:-task-manager} INFRAHUB_TIMEOUT: ${INFRAHUB_TIMEOUT:-60} + INFRAHUB_WORKFLOW_PORT: ${INFRAHUB_WORKFLOW_PORT:-4200} + PREFECT_API_URL: 
http://${INFRAHUB_WORKFLOW_ADDRESS:-task-manager}:${INFRAHUB_WORKFLOW_PORT:-4200}/api volumes: - "git_data:/opt/infrahub/git" - "git_remote_data:/remote" @@ -247,3 +273,4 @@ volumes: git_remote_data: storage_data: workflow_db: + workflow_data: diff --git a/docs/docs/faq/faq.mdx b/docs/docs/faq/faq.mdx index 1d10f16016..2944ea6eab 100644 --- a/docs/docs/faq/faq.mdx +++ b/docs/docs/faq/faq.mdx @@ -77,14 +77,7 @@ Infrahub allows you to load custom schemas to define your infrastructure data mo ### What is the status of the project? -Infrahub is currently in beta, and the team is actively working towards reaching version 1.0 by the end of the year. The project is committed to ensuring data safety and providing a migration path for future releases. - -Upcoming features and improvements include: - -- Scale and Performance: Enhancements to handle larger volumes of data and improve overall performance. -- Proposed Changes & CI Pipeline -- Resource Manager: A dedicated resource management module for better visibility and control over infrastructure resources. -- Profiles: Support for defining and managing different profiles or environments within Infrahub. +Infrahub is production-ready and has been deployed in a number of organizations as a central component of their automation workflows. ### How much data can Infrahub handle right now? @@ -92,7 +85,7 @@ The current data handling capabilities of Infrahub are still being actively deve ### Can I deploy Infrahub in production? -Yes, Infrahub can be deployed in production but keep in mind we are still in beta so please ensure to have the right backup and safeguard in place. +Yes, Infrahub can be deployed in production. 
If you are planning to deploy Infrahub in a critical environment we recommend reaching out to our customer success team via [Discord](https://discord.gg/opsmill) or contact@opsmill.com diff --git a/docs/docs/guides/accounts-permissions.mdx b/docs/docs/guides/accounts-permissions.mdx new file mode 100644 index 0000000000..d9efd886e9 --- /dev/null +++ b/docs/docs/guides/accounts-permissions.mdx @@ -0,0 +1,219 @@ +--- +title: Creating Users, User Groups, Roles, and Permissions +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Creating accounts, groups, roles, and permissions + +In Infrahub, managing access and control starts with creating accounts, assigning them to groups, and managing their roles and permissions. +This guide outlines how to create new accounts, accounts groups, and assign roles and permissions. + +For more information on roles and permissions, see the [Roles and Permissions](/topics/permissions-roles) topic. + +## Creating a new account + + + + +1. Login to Infrahub's web interface as an administrator. +2. Click on **Admin** in the left side menu. +3. Navigate to the **Role Management** section. +4. In the **Accounts** tab, click on **Create Account**. +5. Fill in the account's details (name, email, and password). +6. Optionally, assign the account to a group. +7. Click **Create** to create the account. + + + + + +In the GraphQL sandbox, execute the following mutation to create a new account, replacing the appropriate values as needed: + ```graphql + mutation AddAccount { + CoreAccountCreate( + data: { + name: {value: ""}, + password: {value: ""} + # Optional - Assign the account to an existing group + member_of_groups: [{hfid: "Infrahub Users"}] + } + ) { + ok + object { + hfid + } + } + } + ``` + + + + +## Creating a new account group + + + + +1. Login to Infrahub's web interface as an administrator. +2. Click on **Admin** in the left side menu. +3. Navigate to the **Role Management** section. +4. 
In the **Groups** tab, click on **Create Account Group**. +5. Enter a name for the group. +6. Optionally, assign roles to the group. +7. Click **Create** to create the group. + + + + + +In the GraphQL sandbox, execute the following mutation to create a new group: + ```graphql + mutation AddGroup { + CoreAccountGroupCreate( + data: { + name: {value: ""}, + # Optional - Assign existing roles + roles: [{hfid: "General Access"}] + } + ) { + ok + object { + hfid + } + } + } + ``` + + + + +## Creating and assigning roles + + + + +1. Login to Infrahub's web interface as an administrator. +2. Click on **Admin** in the left side menu. +3. Navigate to the **Role Management** section. +4. In the **Roles** tab, click on **Create Account Role**. +5. Provide a name for the role. +6. Select the permissions you wish to assign to the role. +7. Optionally, assign the role to an existing group. +8. Click **Create** to create the role. + + + + + +In the GraphQL sandbox, execute the following mutation to create a new role: + ```graphql + mutation AddRole { + CoreAccountRoleCreate( + data: { + name: {value: ""}, + # Optional - Assign the role to an existing group + groups: [{hfid: "Infrahub Users"}] + } + ) { + ok + object { + hfid + } + } + } + ``` + + + + +## Managing permissions + +Permissions can be managed through roles assigned to users or groups. +Infrahub supports **Global** and **Object-specific** permissions, allowing fine-grained control over what users can do within the system. +For a complete list of available global and object permissions, see the [Roles and Permissions documentation](/reference/permissions.mdx). + +### Creating global permissions + + + + +1. Login to Infrahub's web interface as an administrator. +2. Click on **Admin** in the left side menu. +3. Navigate to the **Role Management** section. +4. In the **Global Permissions** tab, click on **Create Global Permission**. +5. Select the action you wish to use. +6. Select the decision for this action. +7. 
Optionally, assign the permission to an existing role. +8. Click **Create** to create the permission. + + + + + +In the GraphQL sandbox, execute the following mutation to create a new global permission: + ```graphql + mutation AddGlobalPermissions { + CoreGlobalPermissionCreate( + data: { + action: {value: "manage_accounts"}, + # 6 is the enum value for "allow" + decision: {value: 6} + } + ) { + ok + object { + identifier { + value + } + } + } + } + ``` + + + + +### Creating object permissions + + + + +1. Login to Infrahub's web interface as an administrator. +2. Click on **Admin** in the left side menu. +3. Navigate to the **Role Management** section. +4. In the **Objects Permissions** tab, click on **Create Object Permission**. +5. Provide the namespace and name of the object(s) you want to interact with. +6. Select the action and decision you wish to use for this permission. +7. Optionally, assign the permission to an existing role. +8. Click **Create** to create the permission. + + + + + +In the GraphQL sandbox, execute the following mutation to create a new object permission: + ```graphql + mutation AddObjectPermissions { + CoreObjectPermissionCreate( + data: { + namespace: {value: "Builtin"}, + name: {value: "Tag"}, + action: {value: "view"}, + # 4 is the enum value for "allow_other" + decision: {value: 4 } + } + ) { + ok + object { + identifier { + value + } + } + } + } + ``` + + + diff --git a/docs/docs/guides/artifact.mdx b/docs/docs/guides/artifact.mdx index 719c5b1a27..baa6a59aec 100644 --- a/docs/docs/guides/artifact.mdx +++ b/docs/docs/guides/artifact.mdx @@ -51,7 +51,7 @@ git commit -m "add tags_config_file artifact definition" git push origin main ``` -The artifact definition will be created in the database, when the Git agent(s) notice the change in the Git repository. The `tags_config_file` should now be visible in the Artifact Definition view in the web interface. 
+The artifact definition will be created in the database, when the Task worker(s) notice the change in the Git repository. The `tags_config_file` should now be visible in the Artifact Definition view in the web interface. ![Artifact Definition](../media/guides/artifact/artifact_definition.png) ## Accessing the artifacts diff --git a/docs/docs/guides/check.mdx b/docs/docs/guides/check.mdx index 08f4dbd2c4..b71d1c56ea 100644 --- a/docs/docs/guides/check.mdx +++ b/docs/docs/guides/check.mdx @@ -293,7 +293,7 @@ We added 2 definitions to the `check_color_tags_name`: ### 2. Creating a `ColorTags` group -Targeted checks use nodes in a group a their targets. Create the `ColorTags` group with this GraphQL query. Take node of the id of the group, we will need it in the next steps. +Targeted checks use nodes in a group as their targets. Create the `ColorTags` group with this GraphQL query. Take note of the hfid of the group, we will use it in the next steps. ```graphql mutation { @@ -301,6 +301,7 @@ mutation { ok object { id + hfid } } } @@ -333,7 +334,7 @@ We can then use the id's of the tags to add them to the `ColorTags` group. mutation { CoreStandardGroupUpdate( data: { - id: "", + hfid: ["ColorTags"], members: [ {id: ""}, {id: ""}, @@ -415,12 +416,13 @@ mutation { ok object { id + hfid } } } ``` -Take note of the id as we will need it in the next step. +Take note of the hfid as we will use it in the next step. ### 8. 
Add the tags with the number naming scheme to the group @@ -434,7 +436,7 @@ Add the newly created tags to the `NumberTags` group with the following mutation mutation { CoreStandardGroupUpdate( data: { - id: "", + hfid: ["NumberTags"], members: [ {id: ""}, {id: ""} diff --git a/docs/docs/guides/create-schema.mdx b/docs/docs/guides/create-schema.mdx index 73137d4d56..9fc80fb7b5 100644 --- a/docs/docs/guides/create-schema.mdx +++ b/docs/docs/guides/create-schema.mdx @@ -88,7 +88,9 @@ Load the schema into Infrahub in the `network-device-schema` branch infrahubctl schema load --branch network-device-schema /tmp/schema_guide.yml ``` -We can inspect the schema in the [Web UI](http://localhost:8000/schema?branch=network-device-schema) (Unified Storage > Schema) +We can inspect the schema in the [Web UI](http://localhost:8000/schema?branch=network-device-schema) (Unified Storage > Schema) as shown below. + +![schema page screenshot](../media/guides/create_schema_1.png) We'll create a device and an interface according to the schema, by executing the following GraphQL query @@ -109,6 +111,25 @@ mutation { } ``` +Here is an example of using `curl` to make the query. Make sure to replace the `X-INFRAHUB-KEY` and the IP address with your actual details. Please also make sure to include the name of the branch in the URL. If you want to learn more about GraphQL, you can find more information [here](https://docs.infrahub.app/topics/graphql). 
+ +```graphql +curl -X POST http://localhost:8000/graphql/network-device-schema \ + -H "Content-Type: application/json" \ + -H "X-INFRAHUB-KEY: 1802eed5-eeb7-cc45-2e4d-c51de9d66cba" \ + -d '{"query": "mutation { NetworkDeviceCreate(data: {hostname: {value: \"atl1-edge1\"}, model: {value: \"Cisco ASR1002-HX\"}}) { ok object { id } } NetworkInterfaceCreate(data: {name: {value: \"Ethernet1\"}, description: {value: \"WAN interface\"}}) { ok object { id } } }"}' +``` + +You can verify this in the GUI by navigating to 'Objects' and selecting 'Device' or 'Interface'. + +![schema page screenshot](../media/guides/create_schema_2.png) + +Finally, you can merge the `network-device-schema` branch to 'main' by running the following command. + +```shell +infrahubctl branch merge network-device-schema +``` + We have now successfully created our first schema, loaded into a branch into Infrahub, created the first nodes and merged the changes into the main branch of Infrahub. ## 2. Adding relationships to the nodes @@ -191,6 +212,8 @@ mutation { In the Web UI we can now see that the device has a relation to the Ethernet1 interface. +![schema page screenshot](../media/guides/create_schema_3.png) + ## 3. Abstracting nodes into generics We would like to add another interface `Vlan1` to our device `atl1-edge1`. We could add the interface as a `NetworkInterface` node, however this could cause some problems. The `Vlan1` interface is a logical interface, `Ethernet1` on the other hand is a physical interface. While physical interfaces a lot of properties in common with logical interfaces, they also have their differences. A cable can be plugged into a Physical interface, which you cannot do on a logical interface. 
diff --git a/docs/docs/guides/groups.mdx b/docs/docs/guides/groups.mdx index b532e32ced..61964f49aa 100644 --- a/docs/docs/guides/groups.mdx +++ b/docs/docs/guides/groups.mdx @@ -79,14 +79,13 @@ mutation UpdateGroupMembers { } ``` -![Adding members in group](../media/group_tagconfig_grp_adding_members.png) The resulting mutation should look like this (note that the ids will be different in your case). ```graphql mutation UpdateGroupMembers { CoreStandardGroupUpdate( data: { - id: "TagConfigGroup", + hfid: ["TagConfigGroup"], members: [ {id: "17a8f438-fe39-d85f-3c3b-c51d66d0603f"}, {id: "17a8f437-157a-e023-3c3c-c51f788ddf91"}]} diff --git a/docs/docs/guides/installation.mdx b/docs/docs/guides/installation.mdx index 0e2b33d674..b7eaea2da3 100644 --- a/docs/docs/guides/installation.mdx +++ b/docs/docs/guides/installation.mdx @@ -64,7 +64,7 @@ cd ~/source/infrahub/ Next, clone the Infrahub GitHub repository into the current directory. ```shell -git clone --recursive -b stable --depth 1 https://github.com/opsmill/infrahub.git +git clone --recursive --depth 1 https://github.com/opsmill/infrahub.git ``` :::note @@ -188,8 +188,8 @@ A first version of our K8S helm-chart is available in our repository. 
The following are required for production deployments using Helm: * data persistence must be enabled (except for the Infrahub API Server if using S3 storage) -* multiple replicas of the Infrahub API Server and Infrahub Git Agents should be deployed: you can make use of the `affinity` variable to define the affinity policy for the pods -* a shared storage should be available for use by the Git Agents (through a StorageClass that supports RWX accesses) +* multiple replicas of the Infrahub API Server and Infrahub Task workers should be deployed: you can make use of the `affinity` variable to define the affinity policy for the pods +* a shared storage should be available for use by the Task workers (through a StorageClass that supports RWX accesses) * S3 storage should be configured for the Infrahub API Server :::warning diff --git a/docs/docs/guides/menu.mdx b/docs/docs/guides/menu.mdx new file mode 100644 index 0000000000..3aa09e8897 --- /dev/null +++ b/docs/docs/guides/menu.mdx @@ -0,0 +1,177 @@ +--- +title: Controlling the menu +--- + +# Controlling the menu + +Infrahub allows you to control the menu on the left side of the web interface. + +The menu is made up of 2 sections, the bottom section is fixed and cannot be changed. It will list menu items that are related to the working of multiple Infrahub features. + +The top section is the section that can be changed, or controlled by the user. The goal of this top section is to list the different types of nodes that you have defined in your schema, in an order and organised in a way that is most relevant for your use case. + +By default the top section of the menu will contain an IPAM and Other section. + +The goal of this guide is to show you how you can control or change the layout of the top section of the menu. + +## Loading an example schema into Infrahub + +We assume that you have an empty instance of Infrahub started. + +Save the following schema into a file on your disk. 
+ +The schema contains the following nodes: + +- A location hierarchy with a Country and a Site +- A network device with a relation to network interfaces and a site +- A network interface with a relation to a network device + +```yaml +--- +version: "1.0" +generics: + - name: Generic + namespace: Location + include_in_menu: false + hierarchical: true + attributes: + - name: name + kind: Text + optional: false + unique: true +nodes: + - name: Country + namespace: Location + inherit_from: + - LocationGeneric + parent: "" + children: LocationSite + - name: Site + namespace: Location + inherit_from: + - LocationGeneric + parent: LocationCountry + children: "" + relationships: + - name: devices + kind: Generic + peer: NetworkDevice + cardinality: many + optional: true + - name: Device + namespace: Network + attributes: + - name: name + kind: Text + optional: false + unique: true + relationships: + - name: site + kind: Attribute + cardinality: one + optional: true + peer: LocationSite + - name: interfaces + kind: Component + cardinality: many + optional: true + peer: NetworkInterface + - name: Interface + namespace: Network + attributes: + - name: name + kind: Text + optional: false + relationships: + - name: device + kind: Parent + optional: false + cardinality: one + peer: NetworkDevice +``` + +Load the schema into Infrahub using the following command + +```bash +infrahubctl schema load /path/to/schema.yml +``` + +In the web interface you will now see that all the nodes defined in the schema are available in the Other section. + +## Defining a menu file + +Our goal is to define a menu in which we define 2 top sections Location and Infrastructure. + +Under the Location section we want to define 2 items, Countries and Sites. Under the Infrastructure section we want to define 2 items, Devices and Interfaces. + +We can define this menu structure in a menu file. A menu file is a YAML file that has a particular structure (schema). 
what content is going to be provided
description: "A VLAN is an isolated layer two domain"
A `CoreIPAddressPool` will allow you to dynamically allocate IP addresses from one or multiple source IP prefixes.
-We will create a `CoreIPaddressPool` with the following properties: +We will create a `CoreIPAddressPool` with the following properties: - Name: My IP address pool - Default Address Type: `IpamIPAddress` (the kind of the IP address node defined in our schema) -- Default Prefix Size: 32 +- Default Prefix Length: 24 - Resources: 10.0.0.0/24 - IP Namespace: Namespace > Default -The `CoreIPAddresPool` can be created using the web interface, or by using this GraphQL mutation. Replace the id of the resource with the id of the prefix of the previous step. +The `CoreIPAddressPool` can be created using the web interface, or by using this GraphQL mutation. Replace the id of the resource with the id of the prefix of the previous step. ```graphql mutation { CoreIPAddressPoolCreate(data: { name: {value: "My IP address pool"}, default_address_type: {value: "IpamIPAddress"}, - default_prefix_size: {value: 32}, + default_prefix_length: {value: 24}, resources: [{id: ""}], - is_pool: {value: true}, ip_namespace: {id: "default"} }) { @@ -115,18 +165,18 @@ mutation { Take note of the id of the `CoreIPAddressPool`, we will use it in the next steps. -## Allocating a resource out of the pool +### Allocating an IP address out of the pool -We can now start allocating resources out of the `CoreIPAddressPool` we created. +We can now start allocating IP addresses out of the `CoreIPAddressPool` we created. We can use the resource manager to allocate resources out of a pool in 2 different ways: 1. Directly allocate a resource out of a pool. This is typically used when you need to allocate a resource that has no relation to other nodes. For example, allocating an IP address out of a pool that will be assigned to something that is not stored in Infrahub. 2. Allocate a resource out of a pool to a relationship of a node. 
For example, create a device and allocate an IP address out of a pool and assign it to the device -### Direct allocation of a resource +Please refer to the [Resource Manager Topic](/topics/resource-manager) for further details. -At this stage we can only do this using GraphQL queries or specific methods in the Python SDK. Support for the web interface will come in future releases. +#### Direct allocation of an IP address Execute the following mutation to allocate an IP address out of the pool. Replace the id with the id of the `CoreIPAddressPool` we created previously. @@ -134,7 +184,7 @@ Execute the following mutation to allocate an IP address out of the pool. Replac mutation { IPAddressPoolGetResource( data: { - id: "", + id: "", data: { description: "my first allocated ip" } @@ -152,18 +202,18 @@ mutation { In the mutation we passed additional data to the allocated resource, in this case we passed a description attribute. This description attribute will be set on the IP address that was allocated. You can do this for any other attribute and relationship for the destination address type. +#### Idempotent allocation of an IP address + You can allocate resources in an idempotent way by specifying an identifier in the GraphQL mutation. This identifier links the resource pool with the allocated resource allowing us to create idempotent allocation behavior. This is crucial when you want to allocate resources in an idempotent way using [generators](/topics/generator). Execute this mutation twice, note the identifier. The resulting IP address should be the same, as well as the id. Replace the id with the id of the `CoreIPAddressPool` we created previously. 
A `CoreIPPrefixPool` will allow you to dynamically allocate IP prefixes from one or multiple source IP prefixes.
We can now start allocating IP prefixes out of the `CoreIPPrefixPool` we created.
For example, we want to create a new customer service and assign a prefix out of a pool.
Replace the id with the id of the `CoreIPPrefixPool` we created previously. + +```graphql +mutation { + IPPrefixPoolGetResource(data: { + id: "", + identifier: "my-allocated-prefix", + }) + { + ok + node { + id + display_label + } + } +} +``` + +## Using number pool + +A `CoreNumberPool` is connected to a node's numeric attribute. It lets you automatically assign numbers from a set range as values for that attribute. + +### Creating a number resource manager + +First we need to create a resource manager of kind `CoreNumberPool`. This resource manager will be linked to an attribute of type `Number` for a given node. + +We will create a `CoreNumberPool` with the following properties: + +- Name: My number pool +- Node: `IpamVLAN` +- Node attribute: `vlan_id` +- Start range: 100 +- End range: 200 + +The `CoreNumberPool` can be created using the web interface, or by using this GraphQL mutation. + +```graphql +mutation { + CoreNumberPoolCreate(data:{ + name: {value: "My number pool"}, + node: {value: "IpamVLAN"}, + node_attribute: {value: "vlan_id"}, + start_range: {value: 100}, + end_range: {value: 200} + }) + { + ok + object { + hfid + id + } + } +} +``` + +Take note of the id/hfid of the `CoreNumberPool`, we will use it in the next steps. + +### Allocating a number out of the pool + +In the following mutation we use the `from_pool` resolver to indicate that we want to allocate a `vlan_id` from a resource pool. + +```graphql +mutation { + IpamVLANCreate(data:{ + name: {value: "My vlan"}, + vlan_id: {from_pool: {id: "My number pool"}} # Here we could either put the ID or HFID + }) + { + ok + object { + name { + value + } + vlan_id { + value + } + id + } + } +} +``` + ## Branch agnostic resource allocation Resource managers have to allocate resources in a branch agnostic way. For example if we allocate a resource in a branch, then that resource should also be allocated in the main branch, even if the resource object does not yet exist in the main branch. 
@@ -231,7 +496,7 @@ Allocate a new IP address in the `test` branch using this mutation. Replace the mutation { IPAddressPoolGetResource( data: { - id: "", + id: "", } ) { diff --git a/docs/docs/guides/sso.mdx b/docs/docs/guides/sso.mdx index e48cfde6a0..eb013cbd86 100644 --- a/docs/docs/guides/sso.mdx +++ b/docs/docs/guides/sso.mdx @@ -1,24 +1,26 @@ --- title: Configuring Single sign-on --- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + # Configuring Single sign-on In Infrahub you can configure SSO using either Open ID Connect (OIDC) or can use OAuth2. -The SSO system in Infrahub allows for the configuration of one or more identity providers. While most organizations will only use one provider a reason to have two could be that the providers manage different security domains where one of them might be for regular users the other identity provider could be for administrative accounts. -Infrahub supports three different OIDC providers: +We can enable 3 different identity providers in Infrahub: * PROVIDER1 * PROVIDER2 * GOOGLE -All of them work in the same way the main difference is that the one for Google includes some predefined settings that limit the amount of configuration you have to do yourself. +PROVIDER1 and PROVIDER2 can be used to configure any identity provider that supports OAuth2 or Open ID Connect (OIDC). GOOGLE can be used if you are using Google Workspace as your identity provider, the main difference with the other providers is that GOOGLE has some predefined configuration settings, which limits the amount of configuration you have to do yourself. When configuring Infrahub, setting up OAuth2 or OIDC is fairly similar, though there are some slight differences with regards to the settings you need to have in place. Both options are provided below. ## Setting up OAuth2 in Infrahub -In this case we are going to focus on PROVIDER1 which allows you to connect Infrahub to your first OAuth2 provider. 
In this case we are going to set up PROVIDER1 as an OAuth2 identity provider in Infrahub.
In this case we are going to set up PROVIDER1 as an OIDC identity provider in Infrahub.
Configuring the first provider with OIDC uses environment variables with the `INFRAHUB_OIDC_PROVIDER1_` prefix. For PROVIDER2 and GOOGLE the prefixes are `INFRAHUB_OIDC_PROVIDER2_` and `INFRAHUB_OIDC_GOOGLE_`. | Variable | Type | Description | Mandatory | | ---- | ---- | ----------- | --------- | | INFRAHUB_OIDC_PROVIDER1_CLIENT_ID | `Text` | The client ID from the IDP | `true` | | INFRAHUB_OIDC_PROVIDER1_CLIENT_SECRET | `Text` | The client secret from the IDP | `true` | | INFRAHUB_OIDC_PROVIDER1_DISCOVERY_URL | `Url` | The discovery URL on the IDP | `true` | -| INFRAHUB_OAUTH2_PROVIDER1_SCOPES | `Array[Text]` | The scopes to request from the IDP | `false` | -| INFRAHUB_OAUTH2_PROVIDER1_DISPLAY_LABEL | `Text` | Display label for the provider on the login screen | `false` | -| INFRAHUB_OAUTH2_PROVIDER1_ICON | `Text` | MDI icon to display on the login screen (ex: mdi:key) | `false` | +| INFRAHUB_OIDC_PROVIDER1_SCOPES | `Array[Text]` | The scopes to request from the IDP | `false` | +| INFRAHUB_OIDC_PROVIDER1_DISPLAY_LABEL | `Text` | Display label for the provider on the login screen | `false` | +| INFRAHUB_OIDC_PROVIDER1_ICON | `Text` | MDI icon to display on the login screen (ex: mdi:key) | `false` | :::note @@ -90,13 +140,31 @@ Aside from the display label and icon all the other entries will be provided by An example of what the configuration could look like: -```bash -export INFRAHUB_OIDC_PROVIDER1_CLIENT_ID=infrahub-sso -export INFRAHUB_OIDC_PROVIDER1_CLIENT_SECRET=edPf4IaquQaqns7t3s95mLhKKYdwL1up -export INFRAHUB_OIDC_PROVIDER1_DISCOVERY_URL=http://localhost:8180/realms/infrahub/.well-known/openid-configuration -export INFRAHUB_OIDC_PROVIDER1_DISPLAY_LABEL="Internal Server (Keycloak)" -export INFRAHUB_OIDC_PROVIDER1_ICON="mdi:security-lock-outline" -``` + + + + ```bash + export INFRAHUB_OIDC_PROVIDER1_CLIENT_ID=infrahub-sso + export INFRAHUB_OIDC_PROVIDER1_CLIENT_SECRET=edPf4IaquQaqns7t3s95mLhKKYdwL1up + export 
INFRAHUB_OIDC_PROVIDER1_DISCOVERY_URL=http://localhost:8180/realms/infrahub/.well-known/openid-configuration + export INFRAHUB_OIDC_PROVIDER1_DISPLAY_LABEL="Internal Server (Keycloak)" + export INFRAHUB_OIDC_PROVIDER1_ICON="mdi:security-lock-outline" + ``` + + + + + ```toml + [security.oidc_provider_settings.provider1] + client_id = "infrahub-sso" + client_secret = "edPf4IaquQaqns7t3s95mLhKKYdwL1up" + discovery_url = "http://localhost:8180/realms/infrahub/.well-known/openid-configuration" + display_label = "Internal Server (Keycloak)" + icon = "mdi:security-lock-outline" + ``` + + + This could be the configuration of a Keycloak provider, please refer to the documentation of your intended provider for guides on how to create a client and access the required information. @@ -104,17 +172,45 @@ This could be the configuration of a Keycloak provider, please refer to the docu In order to activate the above provider we need to add it to the list of active OIDC providers. -```bash -export INFRAHUB_SECURITY_OIDC_PROVIDERS='["provider1"]' -``` + + + + ```bash + export INFRAHUB_SECURITY_OIDC_PROVIDERS='["provider1"]' + ``` + + + + + ```toml + [security] + oidc_providers = ["provider1"] + ``` + + + Alternatively if you are setting up multiple providers each with their different settings: -```bash -export INFRAHUB_SECURITY_OIDC_PROVIDERS='["provider1","provider2"]' -``` + + + + ```bash + export INFRAHUB_SECURITY_OIDC_PROVIDERS='["provider1","provider2"]' + ``` + + + + + ```toml + [security] + oidc_providers = ["provider1", "provider2"] + ``` + + + -## On configuring the redirect URI +## Configuring the redirect URI in the identity provider Within your identity provider when configuring the client you will need to configure a redirect URI that defines an allowed URI. The convention used for Infrahub is that it should point back to the Infrahub host on `/auth/{protocol}/{provider_name}/callback`. 
diff --git a/docs/docs/media/group_tagconfig_grp_adding_members.png b/docs/docs/media/group_tagconfig_grp_adding_members.png index 3180f373c8..c8e09d5d1e 100644 Binary files a/docs/docs/media/group_tagconfig_grp_adding_members.png and b/docs/docs/media/group_tagconfig_grp_adding_members.png differ diff --git a/docs/docs/media/group_tagconfig_grp_new_grp.png b/docs/docs/media/group_tagconfig_grp_new_grp.png index e7eceeba49..0147f1b678 100644 Binary files a/docs/docs/media/group_tagconfig_grp_new_grp.png and b/docs/docs/media/group_tagconfig_grp_new_grp.png differ diff --git a/docs/docs/media/guides/create_schema_1.png b/docs/docs/media/guides/create_schema_1.png new file mode 100644 index 0000000000..82a26559d8 Binary files /dev/null and b/docs/docs/media/guides/create_schema_1.png differ diff --git a/docs/docs/media/guides/create_schema_2.png b/docs/docs/media/guides/create_schema_2.png new file mode 100644 index 0000000000..01842be335 Binary files /dev/null and b/docs/docs/media/guides/create_schema_2.png differ diff --git a/docs/docs/media/guides/create_schema_3.png b/docs/docs/media/guides/create_schema_3.png new file mode 100644 index 0000000000..e621da2e84 Binary files /dev/null and b/docs/docs/media/guides/create_schema_3.png differ diff --git a/docs/docs/media/high_level_architecture.excalidraw.svg b/docs/docs/media/high_level_architecture.excalidraw.svg index 79125a302f..71e055c600 100644 --- a/docs/docs/media/high_level_architecture.excalidraw.svg +++ b/docs/docs/media/high_level_architecture.excalidraw.svg @@ -1,6 +1,6 @@ - + - + 
eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1daW9cIkuy/d6/wur5Mk9cdTAwMWFqcl9GenrCNtjQ3jDG29yRxVJcdTAwMDbM2oBcdTAwMTc8uv99XCKx21x1MDAxNEVtmCoo91x1MDAxYqTbt1x1MDAxYmxIqvLEOVx1MDAxMVx1MDAxOcu/v+3sfJ9Mh/b3f+x8t1/q1W67Mao+f/+bef7JXHUwMDFljduDPrxEZv9cdTAwMWVcdTAwMGZcdTAwMWVH9dlPtiaT4fhcdTAwMWZ//3uvOurYk2G3Wretp/b4sdpcdTAwMWRPXHUwMDFlXHUwMDFi7YFVXHUwMDFm9P7enti98f+ZP0+qPft/h4NeYzKy5lx1MDAxZpKxXHUwMDFi7clg9PZZdtfu2f3JXHUwMDE43v2f8O+dnX/P/nSsbmTXJ9V+s2vPfmH20nyBXFy7nzxcdTAwMTn0Z2vFmFCBOKf04yfa4334uIndgJfvYcn2/Fx1MDAxNfPU95LeLXee1TM+sotcdTAwMGad4mHx7GzvbP6p9+1utzyZdmerXHUwMDFhXHUwMDBm4MvMX1x1MDAxYk9Gg4591W5MWr8um+N5v99cdTAwMWFcclx1MDAxZZutvj0231x1MDAxZX88O1x1MDAxOFbr7cnUPIfQx7Nvl+BcdTAwMWY782deZt9fWVhRXCKVpFx1MDAxMjPNPl41v6+0tFx1MDAxNOeCKClcYiZcdTAwMWG71rU36MJ9gHX9hdxrm7H5ymrVeqdcdMvrNz5+ZjKq9sfD6lxi7tb8555/fWOELIThk/jHSy273WxN4DUpLaw1wUrOX1x1MDAxY9uzu4A1U1x1MDAwNG5cdTAwMTGbr8x86rDQmG2If82v/VxitlLB/Er/sdt1XsB+4/1cdTAwMDL+2jjzrUPfn/lz/rXMz+fcW8657Vx1MDAxY9th/ye6urav1eFcdTAwMWU7frDtYqfYr+U+vvrCXHUwMDFlndgvk+9cdTAwMWYv/Pn+t/nyXHUwMDFmh43q277DkmiplabwXHUwMDE3+fF6t93vuL9bd1DvzLfqN8dcdTAwMTdZgsjs4z3QQYlcdTAwMWY6hEBCXHUwMDFhiERcdTAwMDZH8NVIJThcdTAwMDRmXHUwMDE2x1x1MDAxNFx1MDAxM7jgiGLKXHUwMDE3wKGxsjTnhCpcZtBJXHUwMDEyXHUwMDFjWEiLcq6JgNXAZVdqXHUwMDE5JHRcdFx1MDAxYpQoXHUwMDAyq8JyfWgsvLCEgTi36XxVg/6k3H41N4WohWfz1V67a27A/Fx1MDAxYs/2L1xcxXZcdTAwMWa+TOuxllx1MDAxOTc631x1MDAxN17NdttNs6O/12HV9mhhs0/awCxcdTAwMWY/0Gs3XHUwMDFhTq6ow0dW2317VIhi41x1MDAwN6N2s92vdi9cdTAwMDJXXHUwMDA018E+/HXnsOUwerXq2Davmq8tXHUwMDAyMVx1MDAxYkhrXHUwMDFhuZ/8oDUhzIZcdTAwMTZyflfCkFspP9VJvjTJqVxc73DauOtePFxc3aZcdTAwMWK5SsBVxYyTr0lrXHUwMDAwXFxcdLhxSI/0sNpcdTAwMTV63NP309bJeYF2MvvTl0Jm7/BLsZqkfuCQcKtcdTAwMDTYdFx1MDAxZVx1MDAxOVx1MDAxYsFXI53YUMpSilHJKFZcdTAwMDQ5pNOGWY0xS0iuKVVUKo1cdTAwMDRZxohcdTAwMDerMYpcdTAwMTRCRK9cdTAwMGaNr8Zq9Uk3IVJcdTAwMGKx8H6ktrCgxDlcciPlftZBakhoIXV0UnvNXHUwMDFjXGaG+bvsefO+/lq87lx1MDAxZpzc9C7SXHJcXFxmWs5SUnFccrrbsNqiXHUwMDFlVVpYLFx1MDAxMnLvkaojlFxmq6n5mn/5aFxiJDRc
dTAwMDPBkUZcdTAwMWZtMsjyKsqz0S5plFx1MDAxYaelfC3/+LXYXGZ2hS8sXHUwMDE4Z1xunHtcdTAwMTXdS1x1MDAwYr5cdTAwMWUphVx1MDAwNVx1MDAxM1x1MDAxNkeIUVwihITNL1x1MDAxNlx0XHLhXHLBXHUwMDAyM2LBLteMYKp83DS5XGZcdTAwMGbBNDh1XHUwMDAyxSD2vlxuo9VGg+exPfqj/9f7XHUwMDEx/KTdb/xPQsxcdTAwMTZi5t3M9r6wXHUwMDFkr3VFXCI4gdchOOl+9lx1MDAwM8mSXGKsXHTi8+1cdTAwMTOGZESaXHUwMDE1Uj0+t1x1MDAxZq5cdTAwMDRcdTAwMTOXP8bX4+x1upEsJbWExuhcdTAwMTe/LXptXHUwMDE4XHUwMDEzXHUwMDEwrlx1MDAwMoNJe0PyVrw2T35TlCHF08hvclI5yD9cdTAwMWSNy1eKZKdPt9f1/HMjXHUwMDEyv/0t6G0vXHUwMDFlTjqC8cfSyeF17bq3J24qnZH321ZHgKmo71s9K7RcdTAwMDet5n71JpuHt8+h3dfXON730j6QpENKjUI3226j6/L+j8JBXGbva+cznfOzcvZgfzrOZn4+nPPLh+NcdTAwMTjet3NQ5PXW/lWzzOza1eBouHdX6sfwvme115/13PA0o8s/ru2D2vS5//BcdTAwMTLD++JTNcKj03Jb3z31RKmafejVLqO9b4iOUowoSZjDS01IR2nfWDdcdTAwMDZ0XHUwMDBiSuXc4ITZ3mDUpdP2amVcdHCrXHTXnGom5mrkzfZSulx1MDAxOduLObKwxiZcYiNcdTAwMDRcXHfPyNmSXHIm5vZw5oikbV5DXHUwMDExXCKZ4its0/U0VOHdXHT/o589K+yU7dGTUyfFKqJCpIRbRFx1MDAwNa8scVx1MDAxOaWY+8lfQNagn8CYOIg6XGbIR/3dqSpcdTAwMWOSdp71XHUwMDFl6/nMSfn1iKVcdTAwMWPIXGZbXHUwMDE0a61hN3qIKFx1MDAwNDh3ukPrXHUwMDAw+S/3s8e6XHUwMDAyinFccqtJo3qqXHUwMDE3zopcctSyn88uXuhtdjpRr3tsffXUOjlcdTAwMTB3uXxlX1WHx4/ssHpP6WNcZix80ytdNI4m+YfBXeHy/qFyJk5qhXhYXHUwMDE4XHUwMDAxXHRyzoRcdTAwMTPGicTmg1x1MDAwZa6E4Fxc0egxvuDbl070XG5hXHUwMDExajIyXGLyomFEY0NvXHUwMDEyNIxcdTAwMTnRsFnENs+cQVx1MDAwYqy2Ub9cZlx1MDAwZoewUcp4mCr/eD3iXHUwMDA0XHUwMDBiTdBcdTAwMWNYYVjWtsrz7PSk0lD1w1x1MDAxZlx1MDAwZplBMStTnltcdTAwMDX70KJiflxi7VxisM3egFx1MDAwMUuC2CaKI0yCwIy1xHWyqXhcdTAwMDYzwXpzLJdCQu5cZmu1i/xpq3TP7i4yz6XzSrfSW5+Q13Djw4hcdTAwMTPcV85cdTAwMTBP3H2lMiB4SDFsL7ZcdTAwMDJzXHUwMDA2X+d0ok0oi3NEuVRG+1xu7Vx1MDAwNlx1MDAxYt9cZtiMpyxcdTAwMTjVoFZAgSMmlzHnQZxIXHUwMDEzKqhT3GyYOEHhwTVTSK+wUdcjzoNRddja2d/9o//Xvj1gXHUwMDBmSVx1MDAxZFx1MDAwMYQwh5s1XHUwMDAzlpU4ZTLsXHUwMDFihFx1MDAwMmklXHUwMDE0gz1cdTAwMTL9XHUwMDA04JKM0FCdPe3dtnZ3z55cdTAwMWbGg/bUTjeIQdpaimkqiafzmlLGxKDLkVCYxFx1MDAxMH6KnTJf5WWmlENPXHUwMDA3Zyi/N8iqfubE9nFcblehzNz+qDKuX1x1MDAxNuzKWD7dXHUwMDFjXYxsze1olFx1MDAxOfi+SUXq14h8R6J4lTzFM+KbuKbB82GE8Ogh
6uBtkU7jIJjFXHUwMDA0XHUwMDAwXHUwMDFmca6FkktyelNcZi+FJbUpPYD7XHUwMDBlvFx1MDAxZM03ht/SnHDCtlx1MDAxNqM2XHUwMDE0LynCmzvnP4blVpv2zu7jXHUwMDE46HRUrdXak53ez6SYPoTw3ExcdTAwMWa+uuRcdJ/611x1MDAxZiFccvdcZsk5XHUwMDA1hkG60Xx+KZVGXHUwMDE3j6Wbg8y+vilmO93TdENaSmRRylxiXHUwMDE2Xlx1MDAxOW2p5XulmYb/Ulx1MDAxObI+3a10aZbu9c6aRfRcIjpV1r1fn+6TOvD/XHJcdTAwMGWOmfA/OdZcYlBMNVohZlx1MDAxZHj/Ulx0YkWEhZhAXHUwMDFj4MqlO6F8c7yslMVcci1ruCM++eRenreUSlx1MDAwMPK3llC+eVreq9ZbtqE8u9FcdTAwMWUnRcYhbOQmY781JU7BRHD3s7/gy7niXHUwMDFjczq/sKFFjs0huVx1MDAxMrq+f50rndUqXHUwMDA3pZvjUS3d6MVcdTAwMDTwSVx1MDAxMVx1MDAxOCkqvTiYam1xqcLhu3b6rJZcdTAwMTbmhDIqvViYWJJpx2OJlLGQlJr0lVx1MDAxNLLy7fHes2jR7Dl6XHUwMDFknz7tTZTIPPt4tauwMin1J6dyP1/PtZ461ed8vzEtluJiT8kwXHUwMDEyq+SzfIo9KfE/JVJaXG5cdTAwMDRriO7VXHUwMDA2X+eU4lx1MDAwZmtcdTAwMGKYXHUwMDEzXHUwMDEzXG62xji3LvrEXHUwMDEx6XNt/Fx0U/mlNVx1MDAxMLimSirsXHUwMDExuebLWlhr4C6maVxmYnjL/IlcdTAwMTaeXHLgz3N7OFx1MDAxOJtcdTAwMTZcdTAwMTTTnTL8aWjrJF/eXHUwMDE5jHbgU6rdpFx1MDAxODWEXFzcjFx1MDAxYX2VkTiWsTU4XHUwMDE2+1x1MDAxZU5cdTAwMTFQjsToteggt2vq7JSfdi4qXHUwMDA3I/vicnRC7p7GKVx1MDAwNzlgytKEM+Hn6Fx1MDAxMuA+xrhQKjCvY32SXHUwMDA1kFx1MDAwYmbKPzkyj3lI0VmYzMHyXHUwMDFhb9j1I1x1MDAxZlSLuJBMqHRRrSfVOPZNfe9SVCU+zPHq0ytmte5xq3hcdTAwMWSVaks/Lorjfdks/CCX5Gf5+eSw2vI5eV49t2rlk7dPUS0h/kfEXFxwpFx1MDAxNCHRUyO9r2bKUcilpcHUXHUwMDAwwVx04oFCYrKrNkG1hFiKcu2pdJdrnjE41VxuLIfeKsdu+HS4PdkxXGaWXHUwMDEwl4ZwyNKh8NJq4ih39o0oSd+oMFNcXHLhLIVcdTAwMGXD6fNZXHUwMDA3oWZzl1x1MDAwZk+QXHUwMDE4ilwiespO99OOU6BDrvRH5pSr8Vx1MDAwZUNcdTAwMTanklxiI1NcdTAwMDMrOiNElHxzmFx1MDAxNVx1MDAwNTWstPJ2Rz1AXG5cdTAwMThlgtI4XHUwMDBlgT9cdTAwMGJSXHUwMDAyq1V0lYjneiA9spt2v+FcdTAwMDPRrn0/XHRcdTAwMDDoZDD0Q+fCet1QdH9m4n1cdTAwMDd4QFkm7FSNiODRXHUwMDBirHs6oyp9cVqZdFx1MDAwYsX24VPhqdLNp1x1MDAxY46UaouKWVx1MDAxZD/2oE3wXS0tlVx1MDAwMMOUcMtcdTAwMTDA5Cyw61x1MDAwMUiGLXBcdTAwMDJBk3o0iTNcdTAwMDfj4Fxco22d11x1MDAwNLAq2DHpSPJOSPlcdMc9XkpcdTAwMGVEoCxcdTAwMTBD0c9cdTAwMTnH4yuda+TZiFx1MDAxZVx1MDAxN69cdTAwMDbHe1U8vEh5Ki4oP2xxXHUwMDAzV+8gp9nCwDVcdTAwMDS9+19rbWFfSjH9O4C2vI9cdTAwMTmZR2GxZiBGhdji0UQ8jILF
wrNcdTAwMTF63ex8MMJcdTAwMWb9wWhnPFx1MDAxZE/s3maJZnkpO1x1MDAxZSuJRD+Ur0E/3Fx1MDAxN7tkVs9m+k1Exu7Tz261/tRTlf3q4Y/z/nHdzvZpyrFcdTAwMWJGP0JbTFwiJWhITcza54ufY1x1MDAxZiRcdTAwMDUnXGbLXHUwMDE4UPxF2Yf5ujOgnChCnIvIXHUwMDFiWNcm2cp977HWeM1cdTAwMWZcdTAwMWRcdTAwMWOV8FGlmPJmhG/kI1x1MDAwNPLu2zTbwFSYqmtcdTAwMWFcdTAwMWP8W8edwYRbRC9s0Fx1MDAwMOpcdTAwMDHLolx1MDAwNGNcYv9/pJ5cdTAwMWEssWs3/uhcdTAwMDdcdTAwMWH+jVLQ+5K2TUX+cVx0XHUwMDAxW8tkba/QOfH052goOo1i/iV39vhaVuri6jnlQFx1MDAwZWFcIlxmL0uuNXl3hHyBvG788HNMRICJqIrlpO5rXHUwMDEykVxmaJWmXGJYSMzYXG5uUOFggoqvXHUwMDFkfP9SuzjMjc5cbj9vuinfwIxcdTAwMTNLM/bRXHUwMDFjwGNcdTAwMDOLmI6hXHUwMDAymFxir+JcdTAwMDVJLFx1MDAxOIctvXUmXCIrbM/1mIiOXHUwMDFhO1x1MDAwMP3JNFx1MDAwNV6Qx1q2wj1EXG73s1x1MDAxZjFxc2RcdTAwMDNcdTAwMDYkelx1MDAxMG5azZSeUXFSLLxWcoedp0mh6mgzlUroaiUthTVcdTAwMDJ6mSF3sclhajOl9axeg8RcdTAwMTHCiD0p6+CqQTp6byo0O8ZscHlfat48rJ+UNZ10Rb7btHMvfF9li8WXXGZ6eo0hVXqN3mjvf4szg+ZTXHUwMDA0TLRvb1x1MDAxZUI55XiVVqXBty+dINbUXCJEaVxmPlx1MDAxZqZCUTeIN1XBpLSFNdewXHUwMDE27ted3iNcdTAwMWOJqJZcdTAwMDJ+d3tMvOlM6YvquLNzXFztV5uzbqXDkX1cdTAwMGYklVSCV1xiLbmpOXRxceROv5lcdTAwMTJcdTAwMGYsO6tdXVhWXHUwMDEyXHTDXHUwMDEx0Z3B3YfCzajzWq1Me8NKM5dcdTAwMWLX769TXo0owVx0w1p9pE0vliprLSxcdTAwMGXKMbzrsDk05upzUpqBO+ry9T5cdTAwMDCsiWUs6lx1MDAwMrrnSNbGzsRRirjMymQ9Vlx1MDAxZeuDelx1MDAwYnDQ2H+4Z1e3g9bg5KZcdTAwMWSJlUNYToJwXHUwMDAyk8s+lcMynoBcdTAwMDbebfdcdTAwMWLtftP9K3a/MX/FcT3eR5NF6d0zs0T1R3NxkIVcdTAwMTnVXFyBM8aNnZdcXDp+qllcdTAwMWSae2tRupiZMFx1MDAwM8mL3ThcdTAwMWK0+5P3XHUwMDA1Lt2HbnU82Vx1MDAxYvR67clk8SdcdTAwMTe/ZtZAvmVXl+47fFHna27bMDTvuHh//+m4bch5XHUwMDBm0cff//U3z59e3trm4djU8zf45vz/yu3Tfd2KWVE/12yFw/3gvZtOMyaZJZFcdTAwMTk2g1x1MDAxNdHc0ZHp7fdNplx1MDAxYZNcbpsk3cTsmGl6Zlx1MDAwZbKUQMps7LktXHJcYlxmaJNcdTAwMWQk1FYngXzeoHwqMDDrTVI6+qN/nitfJCRCQqjYs13K8pLiiFx1MDAwN/hKXHUwMDBm4i89sFx1MDAxNMBtdFx1MDAxNe1cdTAwMTHcjTGVoNUmjCf8xltpsJLI9O5cdTAwMGJNZlhcdTAwMDezYDf8q7VgXHUwMDA1YC7QQoLrL/FhnFxiXHIoTyRcZr2m+Lg9yVx1MDAxZNDDOznqXFzsXHUwMDFktkYnXHUwMDA3Q1KpxCQ+XHUwMDE04fhzQcTNiY9cZrJM+Vx1MDAwZuwpbKZDMbhPwvFjb+pcdTAwMDNTS7KFuWW/nfrIeOxu
83Ds67j0h39yoSnTQMjpb4eWv1x1MDAwNW7fVJqyr6s/wJBcdTAwMDFcdTAwMDS4XHUwMDE2MXhRn1x1MDAxNyDghrJ5OON3XHUwMDEwICF8nFx1MDAwNlx1MDAwMeI8jXJcdTAwMWZGaMyx4CtcdTAwMWNGiIeJPPx5Xz47v33It1C3YrNxymNcdTAwMWbgXHUwMDFhgSNGKCbE61x1MDAxY1FrZlx1MDAwMZw5XHUwMDBlTdBfXHUwMDA3tFjR5Vxih1OBaOSSJ79OXHUwMDE0XHUwMDE1XHUwMDE3IELiOFGMXYDsnozZ9VP/6IXxm8db1VQ/RyNcdTAwMWWPXHUwMDAw0SZVXaVfgGCKuUJcdTAwMTKZZodSXHUwMDEw5iFAXHUwMDAwz/S3jn5kvDa3eTi2dVxcXHUwMDAyxLf+nmlgXFwzUS2yKVx1MDAwYt69qTRlX1d/zCa4cuex98blx1x1MDAxYSYltfIjhI43KD+CO8f5T0pgXG6ZXHUwMDE2SCR6Ui2Xtyen5Ux+cFLM3Vx1MDAxZT83LuuHQqRcdTAwMWK3XHUwMDE4aMJSfJ5cdTAwMTDhSmVcIrOEXGKNWVhS7ZZGfFx1MDAxM86FRDyOw9TYXHUwMDEzI5rn7ctC7/z2Ym/c40M+uea1R5+2Mlx1MDAxYupWXHUwMDEz+L5rlOZcdTAwMDe+71x1MDAxYYlcdTAwMWMhYmxj3XWYf3MrXHUwMDAyXHUwMDFmr0BcdTAwMDSj6OGF4H2RUjPBpIVMlaZcdTAwMTCANmdC+5uZXHUwMDAwfofnJTdSM9GBKsKkVmIzaFx1MDAxNaSE5lx1MDAxZX03lmuKmVx1MDAwMi1AYsme+nzGXHUwMDA1XHUwMDA3XHUwMDBit7mxZrOkhqvBqJPYXHUwMDE0lVx1MDAxMLLzzLJwLyjxomPGfSNcZrBNXHUwMDE1+NZCR09VtitcdTAwMDWCj1ilr8r9p+lL+aiz/zPl81xiMdLcMs33kDe/Y8ktXHUwMDAy5I5QSK59UnPMXHUwMDAyuFx1MDAxZDOlicmAiWGaQ+zkXlC7w9NG42f1/PkuU3lcdTAwMWNccjI3x2h9ck8q6zGpPvP/nbDqXHUwMDEwI0oq6XA6k1x1MDAxMiP+laxYY1x1MDAwNbRIafSwafA+TqdNw1x1MDAxOFlCS8y9xVxi1sKijGpTjp4+MYIllYzzrTY4eVMjm1x1MDAxYu6WvFx1MDAxYVx0oeZ0qFx1MDAxMYX8XHUwMDEzLoBcdTAwMWRcdTAwMDX42Ssgt3/z/DRtv9RcdTAwMWIjodqkbO89Ptv9dCM3bJRcdTAwMWKhymJKhI9H3naTXFwmXHUwMDE4onh7rVCCKI5ccquX5Ydq/e6s0+pOXHUwMDBlrvV5u0LWVyaJTkVlXG6L5JlTXHUwMDA39W+hkiDCWfRcdTAwMTbzwdc5nfiTyFJcdTAwMDJJKjQx40WJazBcdTAwMTR8/2jjydfv3CdcdTAwMDB/oPtN6iE2k1GXUbjcJFx1MDAxN5aOXHUwMDE143FcZoz4PHOuulM9mTNyj9zT2lx1MDAwMzDKR+fZMt2pPcK7J1Y7XHUwMDExQipuXHUwMDFljbC8OLri+mZcdTAwMTBo/7xcdTAwMWZJXGKZnTZFXHUwMDA2dLCFSyWgwWpZYLbUL+/ehWfMmKVRhELktVx1MDAxMlxitDFcdTAwMWGULFxmbvtAMabCXCKEXHUwMDEzkDZcbpx8x7SpjykwXHUwMDAyXHUwMDE0O+XJXGZKXTOPQOVcdTAwMWVf2fToufNw3eE/XHUwMDFh1dZdMXdcdTAwMThHXHUwMDFlgZlcdTAwMDRcdTAwMGVfmcSRR1x1MDAxMEOtXHUwMDA0N01JXHUwMDE1RmR2XHUwMDFiXHUwMDE3Mlx1MDAwMt5yXHUwMDA1mIWxXFw5VzFSUkOwyVlYptBAXHUwMDFjcN1cdTAwMTg3/Tvn
bVxiPpapLNeY7UjL/FopXHJcdTAwMWVwM48loM3f6Zvz/ytcdTAwMTeb+lpYPlx1MDAxYneBVzj5XGKGUyotLJemzVx1MDAwMmNcdTAwMTSurFx1MDAxOYzhdli0hVx1MDAxMMJcIizUsFZ5XHUwMDFhs8RsVFx1MDAxZnyAoOBcdTAwMWYtW1mynFaJJEeaMrS1MIPU8PHYudyk81x1MDAxYVqTyXCckDBcblx1MDAxMVx1MDAwN25h5FpKJFx1MDAxMYQ/mUYpXHUwMDAyZn/AdsGSO5NLQlx1MDAwZjlcdTAwMDKHeKdcdTAwMTKjUmgzOYtzb1x1MDAxNUSQtoDbJEpSXHUwMDA1XHUwMDExpiyp/VqDmVmXb36OR1x1MDAxOSk4W0pcdTAwMGJG58tOg1x1MDAwMFxuXHUwMDFiXHUwMDAz8NpcInu391x1MDAxZDYwtrGzVztqnXbzyU50jyCA5lqBXCIwf1x1MDAxYYHPqFx1MDAxOeaULOc/gllcdTAwMDVpTFx1MDAxN3h0UZog0qyQ6vG5/XAlmLj8Mb5cdTAwMWVnP0ZcdTAwMWSEy5z5YsxANyBcdTAwMGKKXGKRQkqyXFyLiqnlOlx1MDAwNnMvxntcdTAwMDD311x1MDAxNjNeqDFcdTAwMGYnXmLSMcx/vCA301x1MDAwNcFccok+XHUwMDEz3Hvzp9pIXG4hLKo1Z1x1MDAxYSuqiZhzxpuRlKZcdTAwMGVfstCma2slaFx1MDAxMkuBLOBcdTAwMWGjN+gtW0qPzlVcdTAwMTJTgrnDVdp01OfdVM1XlniCpsmE/Gt9OmzZo6RcIj0hPL+UoOm9pETrQ4T279nLXHUwMDEw3Fx1MDAxNL7CcUnwRPRUQlZcdTAwMTFcdTAwMTAuSjDv3lx1MDAxOKZqxOJSUIqTjO5wsNBi0fd36Fx1MDAxYUEtT00jJWeUXHUwMDEwnMbq1Hbl/ufLbjF31+yWXHUwMDBmOrVqtizq1ZiCOrAtMfpUeDhiUMdbkjg0g7NcdTAwMDZcdTAwMTVcXEBFQGMxyShcdTAwMTbco1x1MDAwNIRbsLFWb4FcdTAwMTEpsFx1MDAxMzxHfHGpcPGEJlK+xdc19VgqseBcdTAwMDK7Wln8dsFcdTAwMWRcdTAwMGa0mccvnMWlhfzHpWum4S7wXHUwMDE1Ri1cdTAwMDfDKZ2GlTJcdTAwMGJR8LiMX8BcdTAwMTFzZ7MqaVFHtUpcIobVXHUwMDFjhZsut7OWqaZccu2ydV2O6oCrXHUwMDBm0lhtr5+0XHTqKC70ur3Domuh7HEpIVx1MDAwNVx1MDAxNKJcYtxcbmhhIYlcdTAwMDZ0zFxmJF/hXHUwMDAz0FSYrlx1MDAxMHRcck79SyU+MVLIXHUwMDAyXHUwMDFiaFoneFalcG0qY1x1MDAxOddCc45cdTAwMTNcdTAwMDIok5aQM9d0Nr/Rw1kxxTNgITCnjGHTMWV+2z6661x1MDAxMCyws0VhXHUwMDFhdFDoiMf8ffY1d4Yyr63b+9sprzzcnFxcraaDPp1gXHUwMDE2UVx1MDAwN1x1MDAwNSd8uXRcdTAwMTChQlx1MDAwM3tcblwizXlcdTAwMDXly+dbIITcRyrbkEGcmpGKXFzM+sBcdTAwMGLFltapf23J31dcdTAwMDP54s48llx1MDAxMVx1MDAxN5MkcrRNd0tcIsKlXHUwMDAyS4Oj+5re+Em1yVWMXHUwMDA0SlwiqVMqiYiCZS+0XHUwMDBm27wkXCLMzPn4XHIkUYhW2Kok8s/do5JcbkrRXG7x24uHk45g/LF0cnhdu+7tiZtKJ+Wdy5Umllwi1KdRKlx1MDAwNp1hKZBLicZvXHUwMDE5OOpIXCLqnekjuFx1MDAwNVx1MDAxZbxnr1x1MDAxMFieXHUwMDEyXHUwMDAy9Nx8j39cdTAwMDVcdTAwMWS0+3Cc0WftycHepNFjbDyVvcbr
iiUhIGVj1kFcdTAwMGJhXHUwMDEzsJVIMSa5kTjUQ9hQXHUwMDBiLXX5SOKQXHUwMDBiVsOIJoLBx1x1MDAxOU1OMPNcXFxy44tcdTAwMWLIvZxG8/mlVFx1MDAxYV08lm5cdTAwMGUy+/qmmO10T7/4MZdcdTAwMDduzMOJmJhkXGb3zdYxrFxyhoGi6P1cZry3f6qtJMhGS2OTJVx1MDAwNlx1MDAxN1x1MDAxN1x1MDAxZe58XHUwMDFkc1x1MDAwMqVBzUmeXFy+jkmiXHUwMDA2USBAXHUwMDEymNC8w5dcdTAwMGaQMcrMQlLOu7OFU65PXHUwMDFiq0/JmPPcfqGckJBcdCF4t5BxLSVhKSN8j6KpXHUwMDE0YjaEMTJGg7tBpFx1MDAxMqOYMGxxXCJcdTAwMTBcdTAwMDPTXHUwMDA3KOFuKaNcdTAwMDBAJnUsrOfImkl1QJxcYs9cYksxpD2cXHKMwNn07L6qhKZcYtH59/pcbmqmNURHZ3Ymc/j4Mrnmd8Nh0cbN1bptUK1cdTAwMWTD62LO2EFgl8Hz5Fx1MDAxY8GdXHUwMDExoHKX5IOwhFx1MDAwNGc8QD14dyVYWcxcdTAwMDBfU1x1MDAxMEOwUVx1MDAxMXhcdTAwMTdcXMm5XHUwMDEx33FcdTAwMWNIcZNcdTAwMDAr/Fez31x1MDAxY5Iroev717nSWa1yULo5XHUwMDFl1b62lvHHjXnMXHUwMDExXHUwMDEzk57xN5WCa0KVoHN2XHLtUO25/1NuKiW3uFx0+Vx1MDAxYu9JYu3K2qGcWlhyzXCSYVx1MDAxOWRcdFx1MDAwML5imFx1MDAxYYh6lkx6XHUwMDFjVVx0hSRGcXh8n1x1MDAxNjSft1efXHUwMDEyNCf5pORMXGLLu+XMwkKSXHUwMDE1MzhgXHUwMDE2XHUwMDE1ZkiAXHUwMDE5XSFwXHUwMDFh3IMqnVx1MDAxMKVcdTAwMWNcdTAwMTkvXHUwMDBl/Spqdp9VKVxy11xcXG6OJeOUOmY0x5umXHUwMDAzbCR9SrDAKCMuTFx1MDAwNFtjqT1bXHUwMDEywIuGc1OlZ962RI9kbmnn7mB0X7iuVp6PVeNw9Fx1MDAxNFe2TtKnVMFNknZcdTAwMTZjOdJcZoqGTaRcdTAwMTEwXHUwMDFiXHUwMDE3XHUwMDFlKTDKYsvBnJiOqeyaOjvlp52LysHIvrhcdTAwMWOdkLunsddSkcVgLyFcdTAwMDVrVcjsbLK8UGxEwm8+WmdcdTAwMTlys2fdYItLXGL5mllcbm5cdTAwMTQ4jdFcdTAwMTN2glx1MDAxMZVOI8u4XHUwMDA0K0pcdTAwMTCXkpiQiqtxXHUwMDA0+JFcdTAwMTaSsN1USP+5tUo8QNtSrCUlQsxcdTAwMDazzb9/UCFcdTAwMTZHiGGOyTZcdTAwMWLMXHUwMDEyTZ3p7UlcdTAwMGKhZnuSkFx1MDAxMFxuXHUwMDExXGJuIbSwkKSFkK+rwkC7c4x09POp4DZg6YQo0sr0XHUwMDEzXHUwMDE3Wlx1MDAxMUaIXHUwMDE2rlx1MDAxM2SqmIWZXHUwMDEwiUZ1KFx1MDAwMXdcYjFwi5SipvLNS1xyoaWc13m2jnH3wXKnUFx1MDAwN9Fm/qg+uJyI7m71Wl/yRmm8d1x1MDAxZYtcdTAwMGUyxzZcXMsks5aDR3Iu6iDj80qlXHUwMDE5OFx1MDAwZVJRijxqpZSVTKpOZLlcdTAwMDZbSMJcdTAwMGXSsF00Z5J4XHUwMDFjvJmjnY1013fbJJdcdTAwMDZa+OVYXHUwMDA1kC/YzCPjxFlMXHUwMDFhyL9cdTAwMTRcdTAwMWRzXHKuXHUwMDE1JSu4msF4SqWJ/aq16MT048fzrLv/VqKvNd44WFx1MDAxYmyzXHUwMDEyXTNfhFxuqU1RXHUwMDAzjZ63XHUwMDFjPKg9lVx1MDAwMFx1
MDAwNXfPXCJcZtDBvFx1MDAwYtFcdFx1MDAwMd7gnIc1211cdTAwMDehXHUwMDFhzK4yRlx1MDAxOUiUYaE9mlJioUwyn/BcdTAwMWFqzCRj4E7xNEqgbGHU2ZPNZuahqK7zu6+DfodE67lcdTAwMWImgVx1MDAxMGZgV8WnJoAlULhFwGEg3Ey2R2ZQvGdcdTAwMWWNxqs3uokkglbRatRkXHUwMDFmSYGYXHUwMDAw9nHMkN1xRIJcdTAwMTRdODNISlx1MDAwNm0tXHUwMDBl5Fx1MDAwYjjzcEItJlx1MDAxNeTg0aXsXHUwMDAxxFx1MDAxNVx1MDAxNzi6jVxyRlQ6bSy4mUxLJYmciaD5u7y3PONmpFx1MDAxYSMgU1xiwcy9sG225FFcZjFA9dZcdTAwMDJB5thcdTAwMTa+ssPof2VcdTAwMWRcdTAwMTSiXHUwMDBmtqmDMEa+5Vx1MDAwNJJJpVx1MDAwNMfRx1x1MDAwZVx1MDAwNPdYTyVITVx1MDAwMZdcdTAwMGKXjidiPfhyrCUwXG4rXHUwMDA109xZxJFcdTAwMWVdc3lcXCmw2tn562Sa3duv5navy1dXsehcdTAwMWFGJFxiXGJGndt+W4VYpqKPgitcYmrAxEKlx7GRa1RcdTAwMDXBn1Mwq0gtk2xcdTAwMDSfXHT+qZRcdTAwMTKL5ZaCwlwimlx1MDAxMmVcdTAwMDbbKFx1MDAwZYtfvtdfSaxkXHUwMDFjeJn9mzh+ZS1Z4j9IzaRcdTAwMDAgMyYpssVcdTAwMGJcdTAwMDZEKi2e1qZAylx1MDAxOVN+M3tcIlx1MDAxObMnsOVdhbFs+jiWsMHp9lx1MDAwNlx1MDAxY86sXHUwMDEw4nRzNePv01x1MDAwNFx1MDAxM1x1MDAxMlx1MDAxZSF87DPacEPSQ/iXjmtzMsOip1x1MDAxNlx1MDAwN89gSSVcbpd1XHUwMDA3IclcdTAwMDCQU8C7MGn+iDOQdMtIxFJcdTAwMTlPhDNT0Fxi63Do/o/R50JcdTAwMGJwnlFcdTAwMWFFyVGdXjdbXHUwMDBmu6Xj8qXIkfr91TXKxCRKlFDqc+204lx1MDAxNyVw81x1MDAxOFx1MDAwMTPNzOxcdTAwMWXw1Fx1MDAxM1Ml0evAsYXg+sCDKsZBfXjVq1x1MDAxM4tg0zyfMyxMXHUwMDE3ffG1ZYlcdTAwMWaczGNcdTAwMTlIMUmWgJaAcFx1MDAwM6TiK1SUXHUwMDA2wyWVxjJcclx1MDAxZFx1MDAwMTmzvDuLLfdcdTAwMDHE6i3cXHUwMDEyRy+NNVx1MDAxYVx1MDAwMWrGPtWzNLWNXHUwMDAwQ2g+XHKNXHUwMDAwMfKf44ipplQ701x1MDAxMEOxXHUwMDFhPFx1MDAwNC6VWMXOXHUwMDFmeS9lTCigwpXFXGI1kVWEqFx1MDAwNlwi8pA2xHRqwCZcdTAwMTlcdTAwMGXsg3RUx8xcdTAwMGK+XHUwMDE13Fx1MDAxMqVxXGaHvrFLm+5T/aJ/XFyalC+PRabS7txk2es0rlRcdTAwMWG4Jjol0oZwhLCm5myCmtUtqVxiaXFcdTAwMGVcdTAwMGVcdTAwMDNcdTAwMTKm5Vx1MDAxOdJELV3XSMrGu2Tbc0lcYlx0XHUwMDAxqlx1MDAwNjYvXHUwMDEyXHUwMDE0c608pFxy6C2FtVx1MDAwMMo3XHUwMDE0zPWy3vpS0sZcdTAwMWZQ5rFcZqWYxI1/JTg3SeQrXHUwMDE1glx1MDAwN1x1MDAwMyaVXHUwMDA2M1xyheDMTORcdTAwMTDec2+X4zRcZkSYisdcdTAwMTlcXKNcdTAwMDKcilxyXHUwMDFlXHUwMDBmJVlcdTAwMDFcdTAwMWVC8nFWgH97XHUwMDA3//fqcFiewFx1MDAxNf2wlXBcdTAwMGLbXHLX0K/vT237edd/XHUwMDFifXvHuoGV
PeO/P7/9+Vx1MDAxZl+hOVAifQ== - Graph DB(neo4j)3rd party containeror systeminfrahub open-sourcecontainer or systemLegendinfrahub-bundledcontainer or systeminfrahub-sdkGraphQLGraphQLRESTinfrahubctlGraphQLRESTbrowser (frontend)infrahubAPI ServerGraphQLRESTinfrahubGit AgentGraphQLRPCmessage bus(rabbit-mq)cache(redis)repository store (NFS or local)git repoGQL (Cypher)AMQAMQREDISREDISGITNFSobject store (s3 bucket)https \ No newline at end of file + infrahub-sdkinfrahubctlbrowser(frontend)InfrahubAPI ServerInfrahubAPI ServerGraph DB(neo4j)Message Bus(rabbit mq)Cache(redis)Repository Store(NFS or local)Git RepoLegendinfrahub containeror systeminfrahub bundledcontainer or system3rd party containeror systemTask Manager(prefect)GraphQLRESTGraphQLRESTGraphQLRESTTask WorkerTask WorkerObject Store(S3 bucket)httpsGQL(cypher)AMQAMQREDISNFSgithttpshttpsGraphQLGQL(cypher)REDIS \ No newline at end of file diff --git a/docs/docs/media/high_level_architecture_ha.excalidraw.svg b/docs/docs/media/high_level_architecture_ha.excalidraw.svg index 7894d8d129..c36c5f73c7 100644 --- a/docs/docs/media/high_level_architecture_ha.excalidraw.svg +++ b/docs/docs/media/high_level_architecture_ha.excalidraw.svg @@ -1,6 +1,6 @@ - + - + 
eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1daXPiWLL9Xr+CqPkyb6KtufvSXHUwMDExL17gXHUwMDA1XHUwMDFiylx1MDAxYsZ4ez3hXHUwMDEwIGPMWiAveKL/+8uLXUZcYm1cdTAwMTjJluc1XHUwMDExXW0jLF2ke/KczJs389/fXG6F7+505Hz/vfDdeWravU5rbD9+/828/+CMJ53hXHUwMDAwXHUwMDBlkdnvk+H9uDn75K3rjia///OffXvcddxRz2461kNncm/3Ju59qzO0msP+Pzuu05/8j/n30O47/z1cdTAwMWH2W+7Yml9kw2l13OH45VpOz+k7XHUwMDAzd1x1MDAwMmf/X/i9UPj37F/P6MZO07VcdTAwMDftnjP7g9mh+Vx1MDAwMIXQ/ndcdTAwMGaHg9lgMdeCXHUwMDEzroh6+0Rnslxy13OdXHUwMDE2XHUwMDFjvoExO/Mj5q3vVb1Z6z6qR7zvVO66lb3K8fHW8fyyN51er+ZOe7NhTYbwbebHJu542HXOOy339td987xcdTAwMWb2V+Phfft24EzM18dv71x1MDAwZUd2s+NOzXtcYr29+3JcdTAwMGZ+L8zfeYLfOOJcdTAwMTZcdTAwMTdaa8mUxEyzt6Pm7yUhXHUwMDE2XHUwMDExWEklXHUwMDA1wURj37i2hj14XHUwMDEwMK6/kVx1MDAxYu0wNlx1MDAxZlnDbnbbMLxB6+0z7thcdTAwMWVMRvZcdTAwMThcdTAwMWXX/HOPv74xQlx1MDAxNsJcXFx1MDAxMP526NbptG9dM1xuaWGtXHRcZmN+cOLMnlx1MDAwMtZMXHUwMDExTimbj8xcXHVUbs1mxL/m935cZnOpbP5kcN/reW/goPV6XHUwMDAzf82c+dyhr+/8Of9a5vM7/jnnnXee6bD9XHUwMDEznV84XHUwMDE3am+LXHUwMDFk3DlOpVtcdTAwMTk0dt6++sIkdZ0n9/vbgT9ff5pcdTAwMGb/ftSyX+ZcdTAwMWSWRCtEkZBcdTAwMTjPXHUwMDFmZa8z6Pq/W2/Y7M6n6jfPXHUwMDE3WcLI7PJB8GAkXHUwMDE0XHUwMDFlXGJJgrRALDE8ou9HPuFBsKW0VkxIiiimfFx1MDAxMVx1MDAxZYxaVGCmXHUwMDE0pjJTeGAhLcq5JoJjXG6PXqllmNAldFCiXGKMXG7L9cGxcGBcdFx1MDAwNWlO1Pmohlx1MDAwM7fWeTZcdTAwMGbFY4PNuyW73+mZXHUwMDA3MP/Gs1x1MDAxOVxmd7EzgC9ze9/YmLS631x1MDAxN45cdTAwMTZ7nbaZ09+bMGpnvDDd3Vx1MDAwZZDL21x1MDAwN/qdVstLXHUwMDE3Tbik3Vx1MDAxOTjjclx1MDAxMis/XHUwMDFjd9qdgd07jVx1MDAxY1x1MDAxMdxcdTAwMDdn79eTw5bH7DXsiWOOmq8tXCJRXHUwMDFiyWyUIP+7b9DVkitcZnMpObPVa1x1MDAwZk1Sqro7aqe/N21d907vzq/yXHJdKZAltNKISmqYzVx1MDAwN928M1x1MDAxYiBXSlwiKc0hsZ2j+y19M709PCnT7sb29Km8sbX3pYhccox5XHUwMDE4OsBegsXnJDmvRd+OfIJDKWAuJZmmWFx1MDAxMeSh+Vx1MDAwZuY1xiwhuaZUUVx0WFx1MDAxNWRcdTAwMTkkXHUwMDAxvMYoUlxiXHUwMDExvT42vlx1MDAxYa813V5GtFx1MDAxNmPiw2htYUDZs1x1MDAxYZX+d99wy0CRcqnmzyRcdTAwMGW3z1x1MDAxYrvDUem6eNK+aT5XLlx1MDAwNruHl/3TfONcdTAwMTZcdTAwMDOTWIrAt1x1MDAwNFFcdTAwMWHEashSiYB7g1RcdTAwMTOhbFjN81xifrlpXGJT
XGZcdTAwMWFa5tFNc4dFbqNcdTAwMTJcdTAwMWJvkla1dVQtNUr3X4zNWLjWQ0QxrPBcbrCIvlx1MDAxZjmFXHUwMDA18FxiY1x1MDAxYbwjhiVMfuGDXHUwMDA1/yBYYEYsmOWaXHUwMDExTFWInyaX4SGAh1x1MDAwNVxi1lx1MDAxNMTeVyG0xnj4OHHGf1xm/n4zhk86g9Z/ZURsMWbeT2yvXHUwMDAzK1x1MDAwNI0rXHUwMDExv1x0/H5+wyiU37BkXHUwMDEyS43IXHUwMDFjVnFIRqRdJ/bBiXN3Lpg4+zG5mFx1MDAxNC/yjWQpKXhtXHUwMDE4aUzocjxcdTAwMTJjolx1MDAxNpD8KV5bIL8pMMKK55HfpFvfLT3sT2rnilx1MDAxNKdcdTAwMGZXXHUwMDE3zdJjK1x1MDAxMb/9XHUwMDE2ddpGhYxcdTAwMWX7N6UnfkN6/auzi+rIuVx1MDAwZT6tPVx1MDAwNkwlPa99XFzuXGZv29v2ZbHE+P1cdTAwMGXafH5cdTAwMWWncN4zZ1eSLqm2yr1ip4Muats/yrspnNfFzWG3tXfOz1x1MDAxZvuXe5P2ydWBcFI4b3e3wpu32+ftXHUwMDFhc1x1MDAxYefD/dHWdXWQwnmPXHUwMDFizz+bO6OjXHJd+3Hh7Damj4O7p2TnjdM7jChccu7knMUy0js6PCpccihcdTAwMTTgJMxcckOcjYxGRz5tpFaWXHUwMDAw75dwzalmYn6/X2wkpVx1MDAxZmMjMUdcdTAwMTbWWHPOhYD7XHUwMDFlXHUwMDE44VqylcQ8XHUwMDFlzjxcdTAwMTGvXHUwMDBm1jpw+1xikUzxXHUwMDE1pul6Wqf86iv/MShcdTAwMWWXXHUwMDBiNWf84NUzqYqdXHUwMDE4yveLneiRZS53PM/AXHUwMDBmZKFcdTAwMTVcdTAwMTdK6eRqZ3+wOVXlPdIpsf59s7RxWHveZzlHMsNcdTAwMTbFYauvoIKUxTgnNFx1MDAwNST/7Wb2WlfpMK5hNHmUOc3ycaWFbp3H49MnelWcuup5i60vc25cdTAwMGZ3xfVOqb6t7NHBPduzbyi9T4GGL/vV09a+W7pcdTAwMWJel89u7urH4rBRToeGwZdcdTAwMDNjrIVcdTAwMTfHWdCwXGaPOlxieHGuPNG6OPRGP758oldcYotQUP6EoCBcdTAwMWVGNDX0ZsHDmFx1MDAxMVxyk0V82uqwIWJKOVtlon5cdTAwMTlcIo5ho5xcdTAwMTExVeF5UFx1MDAxOFx1MDAwMUVotkLcQTuqxIvTw3pLNfd+3G1cZitFmfM8KJiHXHUwMDE2XHUwMDE1jFx1MDAxM/WyWkzmqJidgFx1MDAwMUuC2iaKI0yiwIy1xE3yUYFcdTAwMDdmoupm+SyHhNxcdTAwMWQ1XHUwMDFhp6Wj2+pccrs+3XisntR79f76hJyVv52tX0xcdTAwMDSjmfvFVEZEXHUwMDBmKYZpy1Zg5Ojnl09cdTAwMTRcdTAwMGJlcY4ol8poak9y5yuI+ceA2Ljg8MA1qCBQ9ojJZSxcdTAwMDdcdTAwMTAy0oRcbupcdTAwMTVNXHUwMDFmvlxuXHUwMDAw5lx1MDAwNP7VK0zU9VxieXdsj25cdTAwMGLbm39cZv4+cIbsLqs1gFx1MDAxOEbys3HEsDKnYoZDo1tcdTAwMDRxXHUwMDA0jlwiPKLEID4jYzRSx1x1MDAwZltXt5ubx493k2Fn6uRcdTAwMWLEIJktxTSVJNApzilcdTAwMTNj0ElIKExSiGulTsXP8myjuoNcdTAwMWV2j1Fpa1hUg41DJ8TZXFyFine2x/VJ86zs1Cfy4XL/dOxonlx1MDAwNlx1MDAxNWdcdTAwMTWqz5bikeYq86V+RsJcdTAwMTPXwKNcdTAwMDKVzpPHvqOnRT6Ng2BcdTAwMTZcdTAwMTNcdTAwMDB8xLlcdTAw
MTZKLsn0j2J4KSypzfZcdTAwMDPFwVugyXxu+CvNXHQn7NOC34biJUWelPCsKf5cdTAwMDCGa7edwub9XHUwMDA06HRsN1x1MDAxYVx1MDAxZLfQ/5lcdTAwMTXTx1x1MDAxMJ6f6eNHlz3h03DfW8CTXHUwMDEyMJ/nIZI4TLfaj0/V6vj0vnq5u7GtLyvFbu8o35iWXHUwMDEyWZQyglx1MDAwNVx0yNTOLeErzTT8l8tY+NFmvUeLdKt/3K6gJ9G1We9mfb7PaslcdTAwMWZcdTAwMWapMVx1MDAxZVx1MDAxZtU6+vqhL6p28a7fOPtaS9JMhK9Ja4S0pFx1MDAxYa1cdTAwMTBcZo98frlcdTAwMDSxXCLCQkwgXHUwMDBlcOVcdTAwMTJ5YPGxxKyUxVxyL2t4XCIhXHTlQa63lEpcdTAwMDDyPzGj/KN5ectu3jqG85xWZ5JcdTAwMTVcdTAwMWLHsJGfjcPGlDlcdTAwMDdcdTAwMTNcdTAwMTG+XHUwMDEyzTFhkq+wzbE9XCLnQje3L3aqx436bvXyYNzIN3gxXHUwMDAxeFJEQzZLYaq1xaWKR+/a+bNaWphcdTAwMTPKqFxmXCJhYkmmPa8lTsZCUmryYnJIyldcdTAwMDdbj+KWXHUwMDE2T9Dz5Ohhy1Vi4zHEq12FlO+extuHW/c1fHHYuZ406Y8tXHUwMDFiP6ZFnlJS7DHlWcWtPVbLjz6lpUBIrODVRt/nnOJcdTAwMGZrXHUwMDBiiFx1MDAxM1x1MDAxM8ph+oNz62NPnJA918afUHAhrYG/NVVS4YDINV+WwlpcdTAwMDN1mb1kX54+0cK7XHUwMDEx9HnijIZcdTAwMTNTh2JaqMG/hrVcdTAwMGVLtcJwXFyAq9i9rFxiNYZcXPyEmnyUiSiWsTUoXHUwMDE2hy5OXHUwMDExXHUwMDEwjsTIteQgd1x1MDAxYer4iFx1MDAxZnVP67tj5/RsfEiuXHUwMDFmJjlcdTAwMDc5YMrShDNcdTAwMTHm51x1MDAxMuA+xkzaW2S+yPokXHUwMDBiIFx1MDAxN0xcdTAwMTmSReY19468O5M5WF7jXGb7PvJGtYhcdTAwMGLJhMpcdTAwMTfVXHUwMDA2Uo1n3jS3zoQt8d5cdTAwMGW3XHUwMDFmnjFr9Fx1MDAwZW4rXHUwMDE3Sam2+uO0MtmW7fJcdTAwMGZyRn7WXHUwMDFlXHUwMDBm9+zbkFx1MDAxNe2Vc7ZWX3l7XHUwMDE31Vx1MDAxMlx1MDAxMr5EzFx1MDAwNUdKXHUwMDExglx1MDAxM6Mw+G7mXHUwMDFjhVxcWlx1MDAxYUxcclx1MDAxMJwgXHUwMDAxKCQma+sjqJZcdTAwMTBLUa5cdTAwMDOV7vKeZ1xmPrVcdTAwMDLLoT+VYz94dbjjXHUwMDE2XGaDZcSlMVx1MDAxY7K0KLw0mjS2O4dcdTAwMDaUZERlKiSVYCB3XHUwMDEyXHUwMDAz9fG4i1C7vclHh0iMRFx1MDAwNT1cdTAwMTSn23lcdTAwMDcq8CFX+i0la1x1MDAxMaiaIYtTSYTRqZF7Olx1MDAxM0SUQpOjXHUwMDE1XHUwMDA1Oay0XG72R1x1MDAwM1BcbiBlgtI0VoHfvZOTMPhhXHUwMDE1p209lO47bWfQXG7BaM+5cSNcdTAwMTDqXHUwMDBlR2HwXFxcdTAwMTivXHUwMDFmi/5rZl54gEdszISZqlx1MDAxMVx1MDAxMXyukeLg2NdcdTAwMWKqPlx1MDAxMEd1t1eudPZcdTAwMWXKXHUwMDBm9V4p53CkVJuiIGYnP1x1MDAwZeBNcF4tXHJmSXKRcc1cdTAwMTDA5CywXHUwMDFiXHUwMDAwSIYt8Fx1MDAwMkGUXHUwMDA2VIozK+PgXaPPWq+JoFWwY1JkvkQhPM/YP4VcdTAwMTlnXHUwMDE0XHUwMDE00VxuS1x1MDAxNJPJ
ud5pldiYXHUwMDFlVM6HXHUwMDA3WzZcdTAwMWWd5jzHXHUwMDE3viW2uIFrcJTTTGHgXHUwMDFhgl5cdTAwMWSwtaZwKKVgLC2greBlRrZcdTAwMWNbQZqBXHUwMDFh9Vx1MDAxNqr8ooyCxcK7XHSK3Vx1MDAxNN5cdTAwMTjhj8FwXFyYTCeu0/9YollcdTAwMWVKIWAkieiH8jXoh4di1yRcdTAwMDUqaSpOJMbuw8+e3Xzoq/q2vffjZHDQdIpcdTAwMDOac+zG0Y/QXHUwMDE2k0hcdFx1MDAxYbPZZu31xfexXHUwMDBmkoJcdTAwMWJcdTAwMTSlgOIvyj4swp9cdTAwMTFUIeGt51x1MDAxNbvDpOFcdTAwMTbrN/37Ruu5tL+7X8X79UrO61x1MDAxMb6wj1x1MDAxMG+lLVx1MDAwMmYwXHUwMDE1ZkM3jVx1MDAwZf+t489gwi2iXHUwMDE3ZmhcdTAwMDT3gGlcdTAwMDE/kyH86dxDVpieKXFPXHUwMDAzhthzWn9cZlwiLf+HctDrkD6bi8IjXHUwMDEzXHUwMDAyppbJ255fM7Z24tHP8Uh0W5XS087x/XNNqdPzx5xcdTAwMDM5hoowXHUwMDFjllxca/LqXHSFXHUwMDAyed1cYuL7qIhcdTAwMDBcdTAwMTVRlcpa3ddkXCKJQ1OoiYJHylx1MDAxMU4upfCTvP+xd7z3vHV4u3nQwz/Pi+e3OZ+/jFx1MDAxM0sz9lZ1IGD+ipTWoVwiiFxir+JcdTAwMDVcdTAwMTFcdTAwMDTDxFgotv6s/SpMRMetXHUwMDAyQN+d5sBcclxuXHUwMDE4y6dwXHUwMDBmkcL/7i/szlx1MDAxNm3AgCSPwk3tjeojqriV8nN9Z6/74JZtT6WpXFxiVytpKaxcdTAwMTF/Lf+5WOcwt6nSerZjg6RcdTAwMTHDSD0ta/e8Rbp6ayo0O8BseHZTbV/erZ+WNXV7otRrOztPfFtcdTAwMTUrladccvTwnEKu9Fx1MDAxYeXRXn9KM4fmXVx1MDAwNEw8W/r8XHUwMDA0TDnlXHUwMDE4cJxcdTAwMTjE0Y8vnyDW1FwiJi9cdTAwMWR8PkyFon5cdTAwMTB/1Fx1MDAxZSalLay5hrHwsFx1MDAwMvVcdTAwMDHxSES1XHUwMDE0WP0/SpU+tSfdwoE9sNuzgqWjsXNcdTAwMDMklVWKV1xmLfmpOXZwaSRPv5iSXHUwMDAwLLNwLHNcbpJNKpU8c3rzrnw57j7b9Wl/VG/v7EyaN1x1MDAxNznfj6iYXFzoMrG4WVlhZGHK3lxcwfBcdTAwMTVcdTAwMDWzaszV+7Q0XFxDhJIxsUhoTFx1MDAxMmljZ9LYjLjMyiSelaNcblhx+ErvSjSZuCBTNzuDVmfQ9v+JM2iFXHUwMDFj6dlcdTAwMTN3a9jvd1xcXHUwMDE4xvGwM3D9n5idt2hgcOvYS/dcdTAwMDLO7D3mx8vInHFRicx/Ksyn1OyXt5//9Vvwp5dcdTAwMWW3eXlcdTAwMWX0/Fx1MDAwNN+8/19cdTAwMTnb3iyGpTqbwqRcdTAwMDdKkVx1MDAxY9zRhdTyXHRuji0q4JZcdTAwMDZWXCLQWltcdTAwMTLDVEXZglx1MDAxYqyHXFyoKzBcdTAwMDc3t1x1MDAxOJeIelx1MDAwZv5cdTAwMDK3IWlNtcokzLMmuJVJ9k1cdTAwMDHcnoG99lx1MDAwMXxh0MHl48O089RsjYXqkJqzdf/oeEvnXHUwMDAy3zfvzSg3wItcdTAwMDFXisKAQFBwXHUwMDAyukJ5Pta2R2bUwlRXW6irNpuvT4uWYumWLFibsKFGl1x081xmXHUwMDE1UFx1MDAwZjPQrF2D90dcYubzXlxu85GCrvRJuUQj/VrWz49cdTAwMDfz8iBhRetcdTAwMTe9NTuixCFcdTAwMTeMYSRR8q3ZXFxeXHUwMDFkXHUw
MDFl1TZKw8PKztXBY+usuSdEvi0gxlx1MDAxNFuKz1x1MDAwM1x1MDAwZb5YIZlcdTAwMDVcdTAwMWNgWsYtWn1SXHUwMDE3LcK5kIin4aykXHUwMDFleGifdM7K/ZOr061Jn4+4e8FcdTAwMWL31fVcdTAwMDNcdTAwMGZr7Fx1MDAwN4s871x1MDAxYcnvkeddI1DyXHUwMDBi4GFU81H711j47lGCXHUwMDA0V1x1MDAxMmmUfG07el7k1EyAXHUwMDFihExcdTAwMWGkXHUwMDEwgDbvgvGLmSBcdTAwMDSIXHUwMDE28Elfuu1lZiawMGtcdTAwMTdYU1xmXHUwMDE3XHUwMDEymlx1MDAwN+xsWU7aZeCjMpJKdPL9XHUwMDExXHJcdTAwMGVcdTAwMTbu4yqSz4JcdTAwMDbnw3E3s/qnMWRcdTAwMTdcdTAwMTjF8Fx1MDAwZijzrF7Gw5NSXGIlXHUwMDEySbZcdTAwMDK/O/UywfusPlC1wcP0qbbf3f6Z81ZcdTAwMDJcdTAwMThpboFipMH9xDCW3FwiQO5cYsWsZWdVgTyq9zNTmlx1MDAxOE84hXqJqZN7WW2Ojlqtn/bJ4/VG/X483Lg8QOuTe1arXG5ZVXLLYVx1MDAxM5PI82ZcXImGU555VXZcdTAwMTaeKlxubqxcdTAwMDJapDT58kr0PM6nTcPYdP6VmFx1MDAwN4tcdTAwMTGshUVNujuKWV/5XHUwMDE0MYIllYzzT91B9KJGPq4se/ZqJIaa86FGvFsrl9xcYixcdTAwMTShKHnhV3l583h22a6WTtjZ4ejo+KTemOR8XTSuXHUwMDA2O6HKYkrENzb65Co0YOmVMKs+OdQl+0/ocPhz46B/N7VcdTAwMGY2R9ulPXu8sb4uybabXHRZKJ6WVVOx8JbgQFx1MDAxOdJEXZMvdkTf5nyiT5rewUhSoYnpXG5CfHWXXHRnydqKrb8xXlx1MDAwMPpA9MNQXHUwMDE0XHUwMDE2Xlx1MDAwZiyqXHUwMDA2jaJqVpFcdTAwMTGtj7r391BddaZcdTAwMDbyZuJcdTAwMWE0R4074JO3yi41Wmjcw9kzy0yI4Vx1MDAxND+LJlx1MDAxOF5cdTAwMWFVZ0LXLzVcdTAwMGX37qlcIlxmUa6SXHUwMDEzarSJyyWkgUYtgshbs3B/M1XGLI1cdTAwMTIk+q6zfmnUNpr577M6MqAtl9GMKbNcdTAwMDRh2JRPZ9hb1PlcctxcdTAwMDJsMOXZ9DlJsJpcdTAwMTnFfWrn/plN91x1MDAxZrt3XHUwMDE3Xf6jZd9eV3ZS61x1MDAxZv5ug5JwqXSF9Uduan8oXGZDXCLwnKR3ifFl+ZFZXHUwMDE4S2+kppDaMmm05VlcdTAwMTim0EAgMFVcdTAwMThXXFxyvLxKqixfl6xEw/xKa6RcdTAwMWLhoDOvJbjNz/fN+/+V8zpDSyTwWW1JvMJcIkg0qHJpbbk0W1x1MDAxYVx1MDAxOKOcaFOF0u+8aMs8XHRcdTAwMTFcdTAwMTd1WMfYgkFcdTAwMTezuvhwXHUwMDAx8EBUgHJcIlx1MDAwMVx1MDAxOZ0wTTRl6NNcIlx1MDAwZVLD5bF3uFnvrbh13dEkI5VcdTAwMTQjXHUwMDE0/CrJN5REilxiR2+hXGJVRFwiotAmUrOETY+yjsNodCeuXFxiVFxubapUc1x1MDAxZayIXGLSlpoldWSoiFxiU5bUYbtwTWOJXHUwMDE3r2c5qUubPVtcdTAwMWGoLVcqKK7kXkOh/t2BfXpzvHu05/S7U3TRuF21tVxuxu/aeFx1MDAxNa6C5oKBXCKwflx1MDAxYYHvq1x1MDAxOfb2WvBcYlx1MDAxYlx1MDAwMSqZLuRcdTAwMWMt6pPgtsuJtc58MKZ2OnBcdTAwMDVcYj9cIoWUc0oteJK8fFx1MDAwYmL+wVx1MDAwNDe7+tqKJlxiNObl
hUtKMoZFdJdXVFxiY1x1MDAxZlx1MDAxMtvI4Mmfa1x1MDAxYimEsKjWnGmsqCZiTlx1MDAxOS82UlJcdTAwMGKbTkNx25vXsZGCWFxuVFx1MDAwMddcdTAwMTi9QG/ZUFx1MDAwNm1cdTAwMTJFpmoxuPafJWTWslXvXHUwMDEyMrvV/T9cdTAwMDZ/b05Ht844q7BPXGbPL9VJXGZcdTAwMWVSXHUwMDFhe0PDhY1cdTAwMGWvj8NcdTAwMTBRnK+w5lx1MDAxOd1+LJeYVVx1MDAwNISLXHUwMDEyLHhcdTAwMWJcbiaEWVxcXG5KcZaRXHUwMDFlXHUwMDBlJlosRlx1MDAwMDy6RlArSNNgKTmjhGTTU37N0E6nfvPzabOyc93u1Xa7XHK7WFx1MDAxM007ndCOKW+JXHUwMDEx88Ij5dBOsCbxiFx1MDAwNm9cdTAwMTY8uICKII6ZZFx1MDAxNFx1MDAwYr6sgTC3YGItXHUwMDE0wiykXHUwMDE23olu2rU4VLh5Qlx1MDAxM2lcdTAwMDZcdTAwMDO8r2nAUIlcdTAwMDU3mIlVI1FfSlx1MDAxMFx1MDAwNaHNvH7hLC0xXHUwMDE0XHUwMDFlQlx1MDAwN29cdTAwMDSeXHUwMDAyX6FmYDSc8mlYKbNcdTAwMTBl1FRcdTAwMTdcdTAwMTBcdTAwMWMxf2Krklx1MDAxNmVcdTAwMTJmXCKJTmxdx7CaZXFTUFx1MDAwNiu4hKn4smxdl6M64OqDNlafV7rJXHUwMDA0dVx1MDAxNFx1MDAxN3rd5bDkWqh4UM1IXHUwMDAxxShcdTAwMDK/XHUwMDAyWlx1MDAxOEimXHUwMDAxXHUwMDFkU284VPiYMtGYrlx1MDAxMHSNzlx1MDAwMswlPjFSyFx1MDAwMlx1MDAxYohcdTAwMTVcdTAwMGLeoMK1XHUwMDA1qodxLTTnOCOAmk3AcuabvoTbXHUwMDAz5Fx1MDAwZsVcdTAwMTZYXGLMKWNma6unhfOv4Fx1MDAwZWgggb3VXHUwMDAw8qCDYvsplG6KzzvHaOP59urmasrrd5eH56vpoHfnmiXUQdG5Xz5cdTAwMWREqNDAnoJIs15B+fIqXHUwMDE3XGKh2Vx1MDAwMtgnyyBOTf9cdTAwMDIuZiXXPHWR3sapf03J/1xcXHJcdTAwMTSKO/NaRlxcSpJIhOdcdJmdiWBpcHJfM1x1MDAxOD+5NrmKkUhJJHVOJVx1MDAxMVEwbFx1MDAxMHSfKYlcYjMlNf9cdTAwMDMkUYxW+FRJXHUwMDE0XHUwMDFhXHUwMDBiUlJITlx1MDAxNV4hflx1MDAxYtnyNp/41MRShIaUJMEgMyylWHx5yrVcdTAwMTaiwU9HvspcdTAwMDSeUFx1MDAxMLfMzsyF9Nq3XHK7XG70gPQsXHUwMDA1f1x1MDAwNVx1MDAxNTRC9c12u3pxdzvaXHUwMDFkjXZ7ra22fbPaXHUwMDEyl5Las/Sa6lx1MDAxMtcsaFx1MDAwMpZcdTAwMTIpxmD6g2KgXHUwMDAxsoZcdTAwMWHx41x1MDAwYu9kscZcdTAwMDWjYURcdTAwMTPB4HJGkVx1MDAxM8xcdTAwMDJH469s4Vx1MDAxZk5wa9mvLWhcdTAwMDJgY15ewKQkYnh4O1x1MDAxM1BUXGY0ZfJk5+DZn2tcdTAwMWKpTZ9LbFx1MDAxMsXg3sLLn6xj1p80SDnJs0vWMfnUoFxihGnlhMFcIs89wlxiXHKjwbIrXHUwMDEwmfNcdTAwMGZ/wlx1MDAxMte7bdW7NMzJzna5lpGKiaH3pXaai0PJWMdEtZSXXFxgjFTyzcnRZSFyXHRSk51ocVwiXHUwMDEwXHUwMDAz01x1MDAwNzDhfiWjXHUwMDAwQSZxLK74yJopdUCcXGLPXGJLMaRcdTAwMDNcXFxyjMDVRDpAznCEXHUwMDE1OOc0hU1cdFx1MDAx
Zihn7i+d9s20sSNP7b37VvFC7Zamo1x1MDAxNetuYC+DpJyxg8Ayg+PJOYJHI0DlLulcdTAwMDdhgdD3VVx0WpRcdTAwMGbB9Vx0VlYzQNhcdTAwMTTUXHUwMDEwzFSE4fkryVx1MDAwMlxuSFx1MDAxMYub/FdcdTAwMTE+muC2vl9azIRcdTAwMDPHvOaQSUnQhFx1MDAxYktKKGhMukJiY/D8z7mtlNzis8aQQmKJtS9rh3JqYck1w1lGZZAlqFx1MDAwNvxjaiBcdTAwMWG4ezIo/9g0jVx1MDAwM+v+mWk77zdY79I0h6WsXHUwMDE0TVxmz/tcdTAwMTXNwkCy1TM4ouyzKa9cdTAwMDd2dIXAaXQ5qnxiXHUwMDE0bJDx41BwR1NcdTAwMTPIhHsuXHUwMDA1x5Jx6i1cdTAwMTWRbppcdTAwMGXQUUhBSWOVXHUwMDExXHUwMDE3JoKtsdSB1Vx04KAh3VxcXHSalynRJ1x1MDAxYle0e707vilf2PXHXHUwMDAz1dpcdTAwMWI/pJWtk/UqVXS9pMJiNEeankwwiTTSXHUwMDA05klAXG6MsthyOCelZarovslcdTAwMGLiiMFcXEJcbsaqkJnZZHmg2KhcdTAwMDS9el7RV5JCy5CbvetcdTAwMDdbWkoo1MxScKRcdTAwMDSTyVx1MDAxM3aiXHUwMDExlU8ja/q7XHUwMDEzQCuXkpioiq+IXHUwMDA0eJJcdTAwMTaSMN1UTCm6tbZ4gLil4KRTXCLES63mREKII8QwxySFXHUwMDE0gPcvUGnT2urDhFC742YkhGJcdTAwMDSCX1xiLVxmJGshXHUwMDE06qtoilx1MDAxNVx1MDAwN4U+/0BsXHUwMDEzm8iKYPmEKNJcbtxm4Fx1MDAwNkVcdTAwMTghWvhWkKliXHUwMDE2ZkJkXHUwMDFh16FcdTAwMDT8IVx1MDAwNG4hVYoqXHUwMDFjmLKM0FLO6zxbx/j7YLlzqINou7TfXHUwMDFjnrmit2lf6DPeqk62TlLRQYxoTFx0S3kr1oK4iO5+saiDjNMrlWbgOEhFKVxu2CylrGxSdVx1MDAxMss1mEJcdTAwMTJmkIbpojmTJGDpzSzurC7V3iGA/DbJp4FcdTAwMTb+OFVcdTAwMDFcdTAwMTRcbjbz2vDiLCVcclx1MDAxNL5cdTAwMTVcdTAwMWRzXHKuXHUwMDE1JSu4mtF4yqWJ/ap70Vx0eFx1MDAwYlx1MDAxNM+z7v7aib5WJ6FobfCZO9E1XHUwMDBiRaiQ2mxqoMnzlqN7ouVcdTAwMTKg4O5ZhFx1MDAwMTpY8EZ0QoA3OOdxdXfXQahcdTAwMDazq4xRXHUwMDA2XHUwMDEyZVjogPqUWCiTzLfYp/NXmTvJXHUwMDE4uFM8j1x1MDAxMqhYXHUwMDFld7dku71xV1FcdTAwMTelzefhoEuSld+Nk0BcYjOwq1wiy/Ylq2zcXCJcdTAwMDKmXHUwMDA3N03kkOnJXHUwMDE2mEmTWfOSVbRcdTAwMWE1+UdSICaAfdR8Rlx1MDAxNzyRIEVcdTAwMTdcdTAwMTZccrKSQZ9cdTAwMTZcdTAwMDdcblx1MDAwNZx5eaGWklxu8vCoP1x1MDAxMmSaYnCBk9vYaETl08aCm8m0VJLImVximp/ltfxcdTAwMTm3TJFPXHUwMDAyMoVcdTAwMTDM/Fx1MDAwM/vMkjyKIVx1MDAwNqj+tECQWbeFr+wx+l9ZXHUwMDA3xeiDXHUwMDBm0EGRdX+xdy3Gn7FMwFwiKiWTJ/pEN7/KJVAlXHUwMDEzXHUwMDE2oVhi4Fx1MDAwNiOGXHUwMDE2XHUwMDFiomrjXHUwMDFlwrFf4aDwPmufXFz3XHUwMDE3M7NcdTAwMDFcdTAwMWZjlMlW9jVcdTAwMGL/spF9Vruzm9fH3dueu3uhTzp1kkhcdTAwMTH9XHUwMDE2ddo1XG7/Rp43
ultg5HljXHUwMDE1XHUwMDFjRVx1MDAxMiu0ioJ7XHUwMDE3+2JcdTAwMTLu41x1MDAxOFx1MDAwMz/bwJJcdTAwMWPX0Vx1MDAwZjCfuJbCouDOXHUwMDAxkZloL14kYI3lQpg3Q1xcUzFLtTX7+ZjGglx1MDAwNqTaLldcdTAwMTRWXHUwMDAymY2jLFx1MDAwNTS/v6AwY9LTvCzrgsL7Q7tV2LR7NsxS00x4r1hcdTAwMTiNh0/TrCrLxHCVn5hcdTAwMTNcZi+NgsJhgFaeXHS8hGfwsFxmbSRfW52Ud11Uee7im6fG6d7O+Lj887KXbzybLFxiSzNThY6FdVx1MDAwM+RcdTAwMWHr2ILCXHUwMDBl0CTF71x1MDAxM9RKWpxcdTAwMDXXzlsuXHRlZFx1MDAwM5fePfBcdTAwMWaPYcLgh4+rXGJVm05cXKdfXHUwMDE4wFxymPwxMHf2flx1MDAxNFx1MDAwMt+ec+NGgNdcdTAwMWSOwpC7MHo/TL0jKPhcdTAwMDeQRlx1MDAxOahwxlx1MDAxNf5356XbwK9WnLLkXHUwMDAw5ft77vP23c/aRrGit9qbXHUwMDBm52dHV/lcdTAwMDYoR9SSYt7Ny1x1MDAxZlU0XHUwMDFlr0hQ3nJcdTAwMWR8XHUwMDA2XHUwMDE1+F5OJFx1MDAwM0mAmKnt9tWBmbzLzT8+XHUwMDE2hf9YXHUwMDExczGda0JX2nS4yEWzLiirlH0+3T+/rPzcvrdcdTAwMGaGJ05p94hcdTAwMWNeXHUwMDFk0Hxjzlx1MDAxNIW0XHUwMDEwXHTP6kTcYiZrksb0zchcdTAwMWV0WFx1MDAwYilme87+Qt3XRlx1MDAxZOOhKURcdTAwMDSsvzItXHUwMDFmXHUwMDEyg67bKk32qzvqx8n24KraXHUwMDFh3Mk9fZRv0HGkLKA4LV4jRj6iY1xiRKJKsDMse8yBXHUwMDFlJrPk9r8g98Uh58mK9ENOY0ZW2r1w+nNydu5Q6bpcdTAwMDdyWpveXHUwMDFkn15VT/NccjlcZoCyiGJcdTAwMDArXHUwMDEyxHPMQFIqs5fns3lcdTAwMGXIXHUwMDE4XFxx6vlKf2Hu17zNJeZcInJlw7uyUdOTzZRcdTAwMDBKTnSR3WjziTrkaWj2smzpeSPVfUGesUQmqUtcdTAwMDHY99a4yk/ax9lBvcxcdTAwMWHHJ8/utLi1be9sXtTO59W0XHUwMDE2ptzKma9g2ryezCfWqTNcdTAwMDVcdTAwMGYpXHUwMDExMPkpjFxiyYBdNb6m3lx1MDAwNC/d10RcdFx1MDAxZatkopjN2HBNRbE0q3XLfZeERTQlikpcdTAwMTOIgcEvP+uvlMux4cHL7Hfi+ZNv3v+vKjRkqM7ASlx1MDAwYmQ6XHUwMDFiJ9/KXHUwMDFjjYhcXJo8rU1cdTAwMDE571x1MDAxMuyL3Vx1MDAxM9nYPYGt4DJVy7aPY1x0M5zKz0vLMGaIKr3uitBcbv1cdTAwMDXG9ui2up/R+k9cZiEvdVx1MDAxNvBcdTAwMGYm22ZJKlR6XHUwMDEwxJU2O92T79OJ7lifSyAua1x1MDAwZkKywSBnXHUwMDAx2mNB6LyKXHUwMDBmU7xMIIbyKD72m/SifXu3WT2onYlcdTAwMWTSvDm/QMlaK8eLXHUwMDBmpcxG2nyIXHUwMDBmxrQ02aZYwXQglHiyXHUwMDE4U5ZcdTAwMWbJ6+FiXHUwMDBiwZVm6+XgemkpXHUwMDAzq7AopqkkL0uXVMilQX0p/eHBjHlcdTAwMDFaUtJcdTAwMWZcdTAwMTH9j+BxYyGwSG72okGRS7OXh/5HnFnBbVSWl7hNgSmTW5pG4fD3l4TDINg+UJV8QNejXHUwMDE4ws5D1yOMRHhGisloZoqh5FjFR2qMx0e1jr5+6IuqXbzrN87yjVXs/chr6caMwiNUWYxQk0aOXHUw
MDEw1UpcdTAwMDVtXHUwMDE4JqYsNTY7/6UpKcSWivxjcH2lUFx1MDAxYaewwy11XHUwMDAx03tonlx1MDAwZVx1MDAwZapu7exAbNQ73ctcInuepiRgNFx1MDAxMsizXHUwMDFk4VNcdTAwMDVcZrh7jCqFhFx1MDAwNFuO1LJUkFx1MDAxNueCwVx1MDAwN0x/XHUwMDE3pIlauq+J5EtwhdrAIVx1MDAxMZAvXFyIWctcdTAwMTZEsFxm2MlcZppKYS04oVKButLLmupLyZdwPJnXMpJSXHUwMDEyN+F1b00heVO+b4XCt9GAyaXBzEPhW5PtT1x1MDAxNrdcdTAwMTRGXHUwMDA0XeCJmG5eabh875c3XHUwMDFhnJmP21x1MDAwYpNlxdtcdTAwMTiST7Pi7bdX9H+3R6OaXHUwMDBid/TNVsIj7LR82cnfXHUwMDFmOs7jZvg0+vZcbnZcdTAwMDMrZ8Z/f3778/9cdTAwMDCARWMpIn0= - Graph DB(neo4j)3rd party containeror systeminfrahub open-sourcecontainer or systemLegendinfrahub-bundledcontainer or systeminfrahub-sdkinfrahubctlbrowserinfrahubGit Agentmessage bus(rabbit-mq)cache(redis)repository store (NFS or local)git repoLoad Balancer(HA Proxy)infrahubAPI Server*System needsbackup**object store (s3 bucket)* \ No newline at end of file + infrahub-sdkinfrahubctlbrowser(frontend)InfrahubAPI ServerInfrahubAPI ServerGraph DB(neo4j)Message Bus(rabbit mq)Cache(redis)Repository Store(NFS or local)Git RepoLegendinfrahub containeror systeminfrahub bundledcontainer or system3rd party containeror systemTask Manager(prefect)Task WorkerTask WorkerObject Store(S3 bucket)httpsGQL(cypher)AMQAMQREDISNFSgithttpshttpsLoad Balancer(HA proxy)System needsbackup****GraphQLGQL(cypher)REDIS \ No newline at end of file diff --git a/docs/docs/media/infrahub-readme.gif b/docs/docs/media/infrahub-readme.gif index d19b082383..d0d9b2d8f3 100644 Binary files a/docs/docs/media/infrahub-readme.gif and b/docs/docs/media/infrahub-readme.gif differ diff --git a/docs/docs/media/infrahub-readme.png b/docs/docs/media/infrahub-readme.png deleted file mode 100644 index 0adbea45a8..0000000000 Binary files a/docs/docs/media/infrahub-readme.png and /dev/null differ diff --git a/docs/docs/media/overview-interfaces.excalidraw.svg b/docs/docs/media/overview-interfaces.excalidraw.svg index b68036834d..c14395f270 100644 --- a/docs/docs/media/overview-interfaces.excalidraw.svg +++ 
b/docs/docs/media/overview-interfaces.excalidraw.svg @@ -1,6 +1,6 @@ - + - + eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1daXPiSFx1MDAxMv3ev8Lh+bJcdTAwMWIxaOo+JmJjw1x1MDAxN1x1MDAwNrft9n3NTDhkXHUwMDEwIHOIXHUwMDE2wtiemP++WbRtXHSQXHUwMDA0XHUwMDE4ZMvu5YNcdTAwMGZJQKqqXr6XWVmlv7+srKxcdTAwMDZcdTAwMGZdZ/X3lVXnvmK33KpvXHUwMDBmVn81x+9cdTAwMWO/53pcdTAwMWQ4RYb/97y+X1x1MDAxOV7ZXGKCbu/3335r237TXHS6LbviWHdur2+3ekG/6npWxWv/5lx1MDAwNk6791/zc99uO//peu1q4Fvhl1x1MDAxNJyqXHUwMDFieP6P73JaTtvpXHUwMDA0Pfj0P+D/lZW/hz8j1vlOJbA79ZYzfMPwVGigkHj86L7XXHUwMDE5XHUwMDFhizlFlCuC2MtcdTAwMTVub1x1MDAxM74vcKpwulx1MDAwNjY74Vx1MDAxOXNo9VCvXHUwMDFmN1x1MDAwN2qAd52d2+ZOaefgYOMg/Nqa22pcdTAwMWRcdTAwMDdcdTAwMGatoVk9XHUwMDBm7iY811x1MDAwYnyv6Zy71aDx3G6R40nv8r1+vdFxeub2w1x1MDAxYvG6dsVcclx1MDAxZcwxhF6O/miD31fCI/fwXHUwMDFm18rCilx1MDAxMqkklZjp8GbN+5WWluJcXFx1MDAxMCVcdTAwMDXBROMxuza8XHUwMDE2dFx1MDAwNNj1XHUwMDBiqWmHsdCyXHUwMDFiu9Ksg3md6ss1gW93el3bh+5cbq9cdTAwMWI83zFCXHUwMDE2wvBN/OVUw3HrjVx1MDAwMM5JaWGtXHRWMjzZc4a9gDVThFPKQsvMt3bL1eGI+Ctse1x1MDAxZsZS2byl02+1olxy2Kk+NeDzyFx0x1x1MDAwZX068k94W+b6rfExXHUwMDE3XHUwMDFkd5HhsPlcdTAwMWSdXzhcdTAwMTeqtMH2blx1MDAxZGenudO52Xq59ZFBXHUwMDFhOPfB6suJf57+XG7N73er9o9xhyXRXG5RRrhmYVe23E5z/N5aXqVcdTAwMTlcdTAwMGXVL5FcdTAwMWKZwMjw6+PgwVhcdTAwMTI8XHUwMDA0RYpLScnM6EhvjlxcokNgZnFMMZGmzTHlI+jQWFmac0JcdTAwMTVcdTAwMDbsZIlcdTAwMGUspEU510SANeCzlJpEXHSdXHUwMDAwXHUwMDA3JYqAVVgujo2RXHUwMDEzXHUwMDEzIFjmOFxyrfI6wbH7aDqFqJGjRbvttkxcdTAwMDeEdzxcdTAwMWPA0IpuXHUwMDA3bqbRvyn0qs3VkbNrLbduhvRqXHUwMDA1rHb8kdFcdTAwMWW4wC0vXHUwMDE3tN1qNcpcdTAwMTZcdTAwMTX4StvtOH55XHUwMDE2J+/5bt3t2K2TVIugXHUwMDFknNJzz2Er4vVu7J5jzprbXHUwMDE2qaBNJTZKyPjRZ+QqzFx1MDAxOSfzIPf0+K5CiofBltpql1x1MDAxZarXrZPb86t8I1dcdGhVXGb3+TF5XHKAKyX0XHUwMDExzSGtnaP+hq49NPaPyrRZ2Hy4L1x1MDAxNzZKXHUwMDFmitYo4knggKGCXHUwMDExgz5BM6MjvT3yiVx1MDAwZaUspVx1MDAxOJWMYqNw8VxiOt6Q11x1MDAxOLOE5JpSRaXSSJBJlMTwXHUwMDFhXHUwMDAz7YFcdTAwMTDRP1x1MDAxZq9VglZGtDbFxyfR2ohB2bNcdTAwMWHV40dfgFx1MDAwYlx1MDAwM1nAsFx1MDAxMLNcdTAwMDP3sbDtdYv
Xa0f1WuVx56KzvX/ZPsk3cDGoOUtJxTUmQ15cdTAwMWJVpEpcdTAwMGKLzYTcXHUwMDFhUlx1MDAxNYSy4bWI73xcdTAwMGXTXHUwMDEwiGgmpMxjmFx1MDAxNnhr3EZF5q+T6mH122Hxptj/YHzGXHUwMDEyxVx1MDAxZSaCwlx1MDAwYpPZs1x1MDAxOOntkVNYMGFxhFx1MDAxOCVCSFx1MDAxOPxilNBcdTAwMTB+I1hgRixcdTAwMTjlXHUwMDFhXHUwMDE0XHUwMDA0VVx0gZqchIdgXHUwMDFhwjqBliD3Plxuo9343qDn+H92/lXz4UqnU/13Rsw2xc2PM9uTYStxds1EcFx1MDAwMr+e4DBcdTAwMTLjR5+RLIXgclx1MDAwZVx1MDAxOCNSPyX23pFzey6YOPvau+itXeRcdTAwMWLGUlJLgP5+JrfRoFxyY1x1MDAwMqpVYKaeYPwuQVssuSnKkOJ5JDdcdTAwMTmcblx1MDAxN+92e8fniqw93F1dVIqD6kzk9mvax9pcdTAwMDdl12vUN+3LtVwi4/0ttP746Md/rO1cdTAwMDOg5iNNJlx1MDAxONdR0GVBmpomIY1gsEFiydTMYEtv5nyCTStLQFx1MDAxMFx1MDAwNW6fU81cdTAwMDRcdTAwMWRcdTAwMDNcdTAwMWKlb1x1MDAwMzbMkYU1hoCTXHUwMDBiwVB8pmRcdTAwMDJ0hFKpOYtkTj49Y5afQq4/O2tcdTAwMDfllWPHv4uy4lIpc1xud4xTZrplmZNmRGSNq18hXHUwMDA0cFx0XHUwMDEysyN5t7P+oMol4lx1MDAxNlm7XylcdTAwMTb2j1x1MDAxZndZzpHMsEWx1lx1MDAxYTxWXGZtXCJcdTAwMDB6VP0uguRfasPXopRcdO5cdTAwMWSsySNfVspcdTAwMDc7VdRwXHUwMDA2XHUwMDA3J/f0au0hUI9cdTAwMWJscb5s7G+L663i6aayu3t9VrJrlPaXxpdUISajeMuCL5Nnylx1MDAxNVwiQjFGZs+8pLdyPkEmhEWomSonKI4uXHUwMDExXVx1MDAxYciyoEtcYkw1jFx1MDAxNfFcdTAwMTPNXHUwMDA1viFfTiGNnPEl5clZVE6lxIjrMFxmnTo5eFG6lqR0duht3dQqXHUwMDE1eo9dv5VvLFx1MDAwM0SlxeAuKdNqMotcbkE4t0BcdTAwMTkzLpRKXHUwMDA188LpXCKgZi2YmYnhyLxCr1x1MDAxMp0l5FpcbkOtY5c8Y1x1MDAxYnFzJzySXHUwMDFiyFx1MDAwZp96e+tHa5f8Tlx1MDAxNFx1MDAwNe2VXHUwMDA320H/yLNcdTAwMTfn08OvJzu9TVkvfyVn5PvxYL9kN9qz8Wnq53YvXHUwMDBl1nona93BVadWXHUwMDFm3DhV+6B9vCyeXHUwMDA2euCCRXGeSTJYJE9uXCJcIrniclx1MDAwZXSn918+0c0wt5hEXHUwMDE0QldMYtCthGVKY1x1MDAxNEeYZIpuQixcdTAwMDXOlspcdTAwMTiOnpzWxJJjJoVeXHUwMDA2jl/P0VoqMY+WXFyMo7fdYOXI6XpZTWqmc9M4L09ak/mMpoxcYvcxtHIkqVx1MDAwNFx1MDAwMppcdTAwMWSsXHUwMDBmduFwgHaCnfLj6VapeVx1MDAxN5TtSFYul2A1lVxiXFxwozuGKd/RiVx1MDAxYlxmMLVcdTAwMDQ0gUBTsIq1xFx1MDAxNfJmKV8twFx1MDAxY1xie5ZQgrB0yt0+r5Km3nhcdTAwMTCa7WHmndVcdTAwMGXrl7eLU+5D0Fx1MDAxMsVW3dm655tqbWfnvoDuXHUwMDFll0C5maaSzdDCJFxu50xC42TKZVxcwfhVc1x1MDAxNJGnd18+QayppY2CZcC4VCg6XHUwMDBlYvk2IMZKQ2zMNVx1MDAwMW5PqrmLmb6BeF5cbnjvO9Lu3ON0Mdo
9sXvNlT27Y9eHM7Bd36lcdTAwMDFJZTVcdTAwMDE7hZbGWXiqcctcYpB/uJJcdTAwMTgsg1x1MDAwMEqUz0ZZSqr07JWz67flS7/5aJ8+tLun9a2tXqV2Uc43mKWiXHUwMDAwXCKlXHUwMDExjauc1VpYXFyhXHUwMDE5SilcdTAwMTRcdTAwMTOUq9dlk5m2KGJCxVx0Z00sTsfgXHUwMDFkQllrTlx0W8Ks0CQtk8Vouae3K1xyXHUwMDAwQnXztsbOr7yGt3/pvleZUaTXbD9YdztVt1NcdTAwMWZ/i9OphmdcIu3xtOZqltTX0Fx1MDAxNVX6pnGQhVx1MDAxOdXcMFx1MDAwNDeOXnJcdTAwMTm5qm53Td9alOphNiT6XHUwMDAx7r1TPfDcTvBk4EQ/tOxesOG1225cdTAwMTCMXjl6m2tcdTAwMDbzXHLHnuh3uNHouXHn0DWfONq/f0S6XHJF+1x1MDAxML38/devsVdPXHUwMDBlbfOKXGbq8Fx1MDAwM75Ef8+rSZLr/5lcIppSxGf3YulDN59eTDJLXCJTQo/hdjlcbr/5x/tNaM6kwoJMWVx1MDAwMLCIXHUwMDFiM1NcdTAwMDZKaqZcdTAwMDRSZlxch670xZuxXHQ9os2SXHUwMDA1oT5+dTNcdTAwMTYjR9OyXHUwMDAwvt1tXHUwMDFj7v7ZOdo6PslIhExh4olUQLxJM0lcdTAwMGbKXyc9KEmcyJZcdTAwMDRGIZtcdTAwMDez6TOOucSs5sRkw1x1MDAxM9bsaPCRyEx8PSXls4EsuFxyzFx0ZbE5O7BcdTAwMDC8XHUwMDA1XHUwMDFhSeg9S1x1MDAwZlx1MDAxM0NoXHUwMDAweSaT2lx1MDAwYkqPq/2tbVq6ln7zZKPU8Pe3u+T09CeSXHUwMDFlXHUwMDA1ZCFmNFx1MDAwN8NmwVx1MDAwYoN+XHUwMDEykct+aFx1MDAwZkwtyUZcdTAwMTZjfTrtUYhcdTAwMTnd5lx1MDAxNVx1MDAxOdfLUlx1MDAxZsnFdVxiOlx1MDAwMIkoXHUwMDE5TnNl6cM3l67s48pcdTAwMGZwZFx1MDAwMFx1MDAwMa7Fh6+sy5f+mMLHudBcdTAwMWY4eVx1MDAxOVx0pVx1MDAxOFx1MDAwM2zx7Fx1MDAwMkTcXHUwMDA2svS9dnxwdHVbbKDWqcN6OU99YFx1MDAwMKZcdTAwMDX+XHUwMDExXHUwMDEzwmLqXHUwMDAytGZcdTAwMTbgXHUwMDE5oleaoVx1MDAwMsGKTiY4olx1MDAxMkSjMX3yhFxcqbhcdTAwMDBcdTAwMTWCMpmTWFCBrO/32MVdZ/ee8cv+laqr777Pfy5cdTAwMDWCKeZcbknEqKBSXHUwMDEwXHUwMDE2o0BcdTAwMDDQ9FMnP1xucYPbvFwiw3pZXG4ksVxcUVBkSmnmXHUwMDEwIOmjN5eu7ONcbpDhqnSzs8z/9ccy9cdcdTAwMTQ6fkP9kb4hXHUwMDE3Ta5eokRC9IDU7MDl8mr/23Gh6O3vbF3tXHKqZ5WSXHUwMDEw+Vx1MDAwNi7cXHUwMDFmspghXGJcdTAwMTmnQTDQiCWYXHUwMDEwjNH02sR32rmEcC4k4suYTV16ZUT9yD0rt4+uTjZ6bd7lwVx1MDAwNb/pXHUwMDFm5rcyXCLbYkSBUfaVXHUwMDExgiTPplxuRSghs6/MSe+9nIJZc0uA7NNcZlx1MDAwMyZcdTAwMTDWY2DmxJJcZiBGf+xDlFx1MDAxOZix0GBcdTAwMDdcdTAwMDQuXHUwMDE4vkhoXHUwMDFlU2c8WZDIlFnVsZRcIqdcdTAwMDVcbiPmXHUwMDFjpnFMPGdhxLnnNzNbKjCFkmKLIcZccsq8KlHgxFx1MDAxNVx1MDAwMkQqs1x1MDAwYt1cdTAwMWNrY53TMsG77LSjjjt3XHUwMDBm98e7zc3vOV9cdTAwMWKLITq
AgI0wnUDCiFuAJYWQeJ8ldWlbYjKliYA3LmGpz9JcdTAwMTm4rNa736rV7/bR4Lpw2ve9wuVcdTAwMWV6V1x1MDAwNs5cclOmJN80xERcdTAwMTDEzVG2n97OOcVcdTAwMWPXXHUwMDE2xYyyXHUwMDA0riTSQnBcdTAwMWVPq1wifFx1MDAxN67EXHUwMDEywlbOKXnPtPmnI8sp1PFmZJmYMseCp+TMkaJcdTAwMWPocvZ1semKP5+wNdW9hDCBOfxcdJ5qXGa1kkA0y6FcdTAwMTkkQ1x1MDAwNEVcXNgyXHUwMDEzTWBcdTAwMDLm3LA2x1xuy1xiXHUwMDA2XiDLlYVcYiFcdTAwMWE8qYJcdTAwMTA7slnLy265iFFcdTAwMWF1svlJn3895Y/t6rdL75vb2C24Z1x1MDAxN1x1MDAwZpeovqT0+eudxlj6PClJnq54V6JcdTAwMTWCQlx1MDAwYoXN7DxETEKHdbYrzylyYUlKJCOCYWlWb9Lox0xNlc+U009fMzRirlwiXHUwMDFjgVx1MDAxOViA99eEq1x0cymMOVxi7DhcdTAwMDOdqDRV85n7kTL7iVxiNK/CXHUwMDA0+MKP+1x1MDAxMv09t2hK3C2LgFxyWPE59stKx1g+fS/j4Hs5QVxcQsMqXGJVRp0vZcJCXHUwMDEyxKOakl5YxPdcdTAwMTJsXHSKtTRb74lhvDLpfMlEllx1MDAxZt6ChttGvKdcXFwiOMpXWWf5626QkUyaXCJcdTAwMWLGZdKIITPJI/zKilx1MDAwMqySy5BBLCBcdTAwMTiYeI7ljalRXj5cdTAwMTGKiLa42c1cdTAwMWLuVZhcdTAwMDE/ilBcIqhcdTAwMDUxPJ66NGrB1Vx1MDAxNFxuI1xunCpcctnL0EuE03DaXHUwMDAykaZ4XFxpoybIrPWgeawroPXibsU7XHUwMDBiRGvdvtBnvHrY2zhamjB67ZqsXHUwMDE5hVH6uqiV0epcdTAwMDFcIkFDa2aEj6JcdTAwMTRNrp3AXG56mcxXOzCTIJpDv0mpXHUwMDE41TBcXMzWbITyXHQjuUXk5y5xSFx1MDAwNJt5XHUwMDE1ojhbklx1MDAwNkouXHUwMDFhx1xmSYSEmKNqPFx1MDAxZE+5dLFcXGJLXHUwMDEyxoDOQYTQsUJcdTAwMDdCtYXAuWbrYJklTMkk4E9cdTAwMGLKVEhpKVxuyOxWTkEzv6NcdTAwMDB64zKH4dO/MpJAU7TBuFx1MDAwNFx1MDAxYTMlW1x1MDAxMcRSNijkWEom53jaRPpy+VxcXCJUKWHKzlH8ulx1MDAwZUxMXHUwMDA0wYBcdTAwMTembZ22XHUwMDEwRDG4XHUwMDAx+Fx1MDAwMs0kXHUwMDAzfoqUJ0Q1XHUwMDEwQWp0+eHLVKhkXGbCKZ5HXHK0VvabXHUwMDFisl4v3O6oi+L6o9dpktnmVHKggdJ3XHUwMDFhXHUwMDFk1UBcdTAwMTBcXFwiXHUwMDAyWEFgXHUwMDE5RFx1MDAxNjhy1ZNcdTAwMDRcIpbMRlx1MDAwM80l1SjEwVJIanJYXHUwMDEykUlcdTAwMTWE8fi+QJ9PXHUwMDA2JeFteDKCtCWpIJ6YXHTCSkgtXHUwMDE4m2NcdTAwMWaddETl0sdqs8mrhniayKFcblx1MDAxYX2iXHUwMDBmxoybwnZGQKdcdTAwMTCzXHUwMDE5ZH50kFlcdTAwMGXEOFx1MDAxNT9RvWeWQmiKPnhcdTAwMDMhlL73JEtcdTAwMDYqNpO/Yp7CbOdGXHUwMDFkfOPfmien275zcubvk+u7Xr6BXG5cdTAwMWFHW4yGT1xcXHUwMDFkf4KPtIA1YECKrFx1MDAxZlWyjL0nTSWMkEyoTHbceHWxSSxbRMZNZeNM2Fx1MDAxMpe2uH33iNlNa6+xc/FcdTAwMDZ7T05cdTAwMTdcXExoXHUwMDFkkZ+Z7Vx
1MDAxMZn4XFxXXHUwMDEwL0KY3MXMIIxvzHyDkGFlwVx1MDAwZlx0d6uojpY4vjxcdTAwMDBPh2WZXHUwMDE5glx1MDAxMGJcdTAwMWbOpCZm8Vx1MDAxNlx1MDAwM1x1MDAxYcSTIIwrNkFArlwiatg7cKaUlM2zm2lcdTAwMWNnzl5sUneDlUrLXHUwMDFkacXl1pqkc0nMJMqEPVmXmiRTJ6JacDVcdTAwMGZq071YTlErhUUlkvHUqU0hXG5WXFxpXHUwMDAyQKI8m1JcdTAwMTNsUY2leeQ4XHUwMDE1XHUwMDE4XHQsYjJcdFx1MDAxMllcZlS25lJA7Fx1MDAxMVsspoXZhzaTXHUwMDEyzVx1MDAwNdNcdG1SuKLN622/Vr6wT1x1MDAwN3uqWvLvlpVOoIigV+0uO2M6IVx1MDAxZMQr0blcbmON4mZcdTAwMTZSQcxcIiOjaSVcdTAwMTKna0LNPqxcXFx1MDAxMST4yKzL25ebICzNw+5cdTAwMDVcdTAwMDUgMKJjplekRdXwsXOfOLOQXGZA81x1MDAxYYde+Glfor+XV2hcIkxcdTAwMTFcdTAwMTBVczxhJlx1MDAxZGA59bxcdTAwMWa20Fx1MDAwNJulvqY28T3zXHUwMDBin6bSZIpqWF6lyZcn/K/a3e5xXHUwMDAwrfniQqH73OqzfHzq7tU711x1MDAxOawnj6UvT4g34HKGrPnPl3/+XHUwMDA3XHUwMDExXHUwMDFkXHUwMDFhyyJ9 - infrahub-sdkGraphQLGraphQLRESTinfrahubctlGraphQLRESTbrowser (frontend)infrahubAPI ServerGraphQLRESTinfrahubGit AgentGraphQLRPCGITgit repogit clientGIT \ No newline at end of file + infrahub-sdkinfrahubctlbrowser(frontend)InfrahubAPI ServerInfrahubAPI ServerGit RepoTask Manager(prefect)GraphQLRESTGraphQLRESTGraphQLRESTTask WorkerTask Workergithttpshttpsgit clientgit \ No newline at end of file diff --git a/docs/docs/media/overview.excalidraw.svg b/docs/docs/media/overview.excalidraw.svg index c861ab2ca0..39666a3bc7 100644 --- a/docs/docs/media/overview.excalidraw.svg +++ b/docs/docs/media/overview.excalidraw.svg @@ -1,6 +1,6 @@ - + - + 
eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1daW/i2pb9Xr9cdTAwMDLV+9ItVXzPPDyp1cpcdTAwMWNSmeekq1Uy2Fx1MDAwMSeAiTFJyNP9771cdTAwMGZcdTAwMTkwYFx1MDAxYlx1MDAxYnDi2+9GureqMMHbh7P22vP517dK5Xs46Lrf/1n57j7X7ZbnXHUwMDA09tP3XHUwMDFm5vVHN+h5flx1MDAwNy6R4b97fj+oXHUwMDBm39lcZsNu759//NG2g3s37Lbsums9er2+3eqFfcfzrbrf/sNcdTAwMGLddu+/zf9cdTAwMGbstvtfXb/thIE1usmK63ihXHUwMDFmvN7Lbbltt1x1MDAxM/bg0/9cdTAwMDf+Xan8a/j/iHSBW1x1MDAwZu1Oo+VcdTAwMGV/YXhpJCDGXGZPvnzgd4bSYqowXCKaKvTxXHUwMDBlr7dcdTAwMDE3XGZdXHUwMDA3Lt+C0O7oinnp+7FeO71/Uk94z929u9/d2T06Wj9cdTAwMWHd99ZrtU7DQWsoV8+Hx1x1MDAxOV3rhYF/7156Tth8X7jI60m/XHUwMDE1+P1Gs+P2zPOPXHUwMDFlxO/adS9cdTAwMWOY19BI+NdF+Gdl9Moz/ItxZTGkXHUwMDE1w4RKzDT7uPr2+9hCXHUwMDEyMUk4JphoPCHYut+Cr1x1MDAwMlx1MDAwNPtcdTAwMDe51S5jI9Fqdv2+XHUwMDAx8nWcj/eEgd3pde1cdTAwMDC+sNH7nt5cdTAwMWVcdTAwMTlrblx0iZXk6PWHfryl6XqNZjhcdTAwMTR2dHt3+C1gzVx1MDAxNOGURr5Gc9Nu1Vx1MDAxOW6J/1x1MDAxZK19XHUwMDAwm6lqfqXTb7WiXHUwMDBi2HHeXHUwMDE28H3rjDZcdTAwMGZ9e+XP0VOZ929Obrroxotsh41cdTAwMDd0eeVeqZ11tn/nurv3u53a5seTj+3S0H1cdTAwMGW/f1xc+PPtbyPx+13Hft13WFx1MDAxMq1cdTAwMTBcdTAwMTNK4shcdTAwMTK1vM795LO1/Pr9aKt+izzIXHUwMDE0SIa3j8NcdTAwMDfSXCJcdFx1MDAxZlx1MDAwNCGtXHUwMDE15ig7PtJcdTAwMTeknPiQxOJCwnMyhSimTE5cdTAwMDJEfFx1MDAxNkCEtCjnmlxijqlBippcdTAwMDZcYp1cdTAwMDJcYiWKgFRYLo6PsVx1MDAwYlNASNmrSFxiyvPs1ZFUfic89V6G202Nvbplt72W+Vx1MDAwNkZPPNzEsIpeXHUwMDA3XHUwMDFlptmvrfSc++9jV1dbXsNs6+91kNpccsZ2fOhcdTAwMDHBfLyh7TlOlDLqcEvb67hBNYui91x1MDAwM6/hdezWWapEsFx1MDAwZe7O+zeHLcIjW6PnmqvmsUUqcFPZTVI0+epcdTAwMDe5aYaoQIyPkDRcdTAwMTO8+079JWyc/Pbua7XQv31+auOSg1dcdTAwMDB4teZSKlx1MDAxOU9uyKKwXG5aXHUwMDE2jl3JLKTpXHUwMDA3t2UkN6EpvJWTkdzlIbd90T8n9uHlmmg/oJeOLXjQsZdEblxuXHUwMDE0XHUwMDFjVVGgXHUwMDE0QW5cdTAwMTIn2n5KU0Ekz2H6pS9HOdGhpCWNJSFcdTAwMTRWXHUwMDA0RUyot9/nn4VcdTAwMGXGwPTjmlJFpdJIkGl0TDNcdTAwMWIjXHUwMDEySaBdsjg45ma23Fx1MDAxYnU5zFZcdTAwMGZbXHUwMDA1XHUwMDEx21xmJZ9EbGNcdTAwMDJcdTAwMTXOa1x1MDAxOJNcdTAwMTSrlIFJXHUwMDA2Ns9oUWdB96W58+TX6lx1MDAxNy/e0/pt8FBzXHUwMDA2et8rN3QxXHUwMDE2lmKSvnttXHUwMDEx
XHUwMDEwmFx1MDAwZlBSWJpRjsF7lanQvUWqjtBcIsRGLKpe/bZp0Eb051x1MDAwN6VxXHUwMDEwXHUwMDFh68hcdTAwMTOWh9JcdTAwMDbYXHTOn+6rXdnzSfdku9q1T8+XQ2lUaiYjXGZfmL+GeVwiMlx1MDAwNNeY5OG09PUoJzCYsjhGkmottYlcZozjQuPPwlx1MDAwNdJcdTAwMTbiWlx1MDAwM3FcbiaYYmxcdTAwMWFcdTAwMWR8XG5cdTAwMWQgtNDAaUuw9+alNE6Q0Fwiz0aNpTQ09mpcbqVVXHKDVPZcdTAwMWLt8FfnzPdbvYKIbYaSnyS2kViVXHSpMrFcdTAwMWJj87ObXCKJXHUwMDEw1uCwXHSgt+xcdTAwMTAmzt320Sa9+Ln91Fx1MDAwZXZrLzveQ71XblxiU00tsL61XHUwMDEwsVx1MDAxMUks4LImkoLtZTC8iFX6j9vhzzR8wU+zXGZq3z22SPjkXHUwMDAzvphrS2nGKSaxYUljhLAyem6oe1V72qDb96jWvpWnpzs3V09H8TRnXHUwMDA3gf9cdTAwMTThuVx1MDAxZmmf2zukW1x1MDAxYlx1MDAxN+i4i8XBJtpTx2d7zbtM9Jn6sfZR1fObjVxy+3p1i/H+Jlp7eVx0sok7k5aZ1JwhXHUwMDFjXHUwMDA1d1x1MDAxMbRMkpNcZoiAn8U0XHUwMDE2mSGdvsylhDSX1JJcXFx1MDAxMlxmzKs5/G9cdTAwMWPShCCwZk1URFxuUlx1MDAxNKQxXHUwMDA1uFx1MDAwMiNTzcGh1TxbXHUwMDA0XHUwMDA2yFx1MDAxOFx1MDAxMVA5YnFcdTAwMWPP7WRcbkYwZjn2aFx1MDAxYyNHniCjk1lcdTAwMTBcdTAwMTHPYKQkXHUwMDBmMydcdTAwMDHTdFx1MDAwMn5VXHUwMDE0sa4lS3QtXHUwMDE1wpowzLNDtbPx0Do61e2nrng+Wr9C3t79QcnzgVx1MDAwMuiVXHUwMDEwQpPygVxcWVx1MDAwMpuwpEhnX8VcdTAwMDTlaj6oUpNcYuTSXHUwMDE4z0opglWca8lcdTAwMTPJlzJcdTAwMTOzXCLLwOw095LZ3JuatVx1MDAwMydsLuO6XHUwMDE32kG45nVcdTAwMWOv01x1MDAxOFx1MDAxN+wtXHUwMDA1niUxMVx1MDAwNH+9b6RcXDF5XaJcdTAwMTjnYKTAt1xuokXe1bC7Q8VscVx1MDAwNl+AiizycNs+u86R73XCN9mnlsTtOLNFTVdcdTAwMDRcdTAwMTFRkYVcdTAwMTQnSnJcdTAwMGVcdTAwMGUslUagKVGBQfKJ2LJ74brfbnthOP7O8Vx1MDAxNV81iqLp2lO7XHUwMDA0XHUwMDFlMXptUqN0zSeOW2Kjv1VGmFx1MDAxYv7j4+//+yP23cl4MD9cdTAwMTEkjD7oW/TP/FpQUzb58rtcdTAwMTbkXHUwMDFhaFx1MDAxYUmWPb7mUXfb3WmsNladi42djeCOrpCS+yCMUlx1MDAwYlx1MDAwYik4fU1cdTAwMWONIPv6+0paWitwlZUqTFx1MDAwYmIqLIbBMlx1MDAwNIdcdTAwMTDuw2JcZlx1MDAxNlx1MDAwMSDWWGpcdTAwMTGNvr1pQaKIXHUwMDAwS4dcdTAwMTVcdTAwMTJoW0xcdTAwMGJKjYlcdTAwMWPJtXwtuNdZXHUwMDFiqOpcdTAwMGXxtli7X99aOTh92WOxWlx1MDAxMFlMYHA2wVxyXHUwMDAwK1x1MDAwZuzTqFx1MDAxNnlVLcrCXHUwMDA0fyTvXCKW2JeoQoA5XGJcdTAwMDJ7XHUwMDAzlDKKuFxyI6XNLU3oXHUwMDFjSvuvpFx1MDAxMVx1MDAxM7FhfqKoyKlcdTAwMTHT01x1MDAwZbDsiX5cdTAwMWOhXHUwMDFjrFx1MDAwZUSyh2aqa4fdrf3aXHUwMDFl
9p3L7vVxzzm9PC15xpBQYjEwgz/U4kTaXHUwMDAxI4soSlx1MDAwMUi04PCqQJZGYKVSybT5iYnPxHhzXGIzplx0KWPuQTSCzqbvrL/o/Zurzp6NN/jvqMqqLJJ7UITlqr9JxUhy7iFSXHUwMDFiNVx0XHUwMDBmwZWUlOQoN0lfkHLCXHUwMDAzdiXlXHUwMDE4NFx1MDAwMShcdTAwMWaN0HhCXHUwMDFkzIXPQlx1MDAwN+JcdTAwMTbmWjMtKGJaKT2NjpiYJVx1MDAxMYBoJMRcdTAwMTLgMX/2XHUwMDAxXHUwMDE2XHUwMDA2zWVcdTAwMWHMlX04XGabblA59c9cblxudszQ8ZPBjlx1MDAxOHEyRTuwXoDVKNKTL3+kXGY1NlViNHvE42Vl2+9u/V49adzWX3avOttcdTAwMDfX7bNyo5ZSXGZcdTAwMTZcdTAwMTPYd4SpaVtcdTAwMWZsXGJLU6zeXHUwMDAzXHUwMDFlxaGWXHUwMDEwZYGO/KA0XHUwMDFkkzKMSaiDUYKZkLKMXHUwMDA10KG/ym20xYI14lx1MDAxYzuHx1u1rf7O4lx1MDAxOYG1u+p1cP9in1x1MDAwZtrd88bmZq9+e1VdUkaAKmPgiCj+XG4hS1wiXHUwMDEzs3zG8cRcdTAwMWOoXCI77NJcdTAwMTe6nLBcdTAwMTPcQuB8SaSUQkSNXHUwMDA3XHUwMDFhlUafXHUwMDA0O/D+LE61YkiAR89ITO3ZdKJcdTAwMWVcdTAwMDNVUviaXCJ1nF+RqddKklx1MDAxYzt1Ma48XHLh9n+s9np9WM66+6uzYYd2Qbw5g0YmeXNCtMq4ZIVn7KMtQlNYpkjBXHUwMDE2zlx1MDAxMS5LXHUwMDBmXpRcdTAwMTLLhFx1MDAwMJbBjqBgbMYlXHKoXHUwMDAyb40xPitcXLZIfo8gZIFnKkimOjTGNVW4jJxZr1x1MDAxZe06qOk+XHUwMDFknT3Tm9VBqF7WszmCX8aZTGrwXHUwMDFk8mQo5+JM8JxcdTAwMTJhxsDfkVx1MDAxY8vs/mX6OpdcdTAwMTNmQliMMilcdTAwMDRcdTAwMDP3UtFcdJQxujSUpTMmllx1MDAxNjEuLlx1MDAwMc9cdTAwMDBTovA04uRcdTAwMTTiXGJmRMNmXHUwMDExX9iJRFx1MDAxNdUg8KKUmbleu/qWvP7VOVx1MDAxZHTqXHUwMDA1ceVcZr6IrWyLkSlcdTAwMTNLXG6citzEhJLQiT4m5txk9GBXZ4Zuui4rJXSpKdhcdTAwMDbzUktcdTAwMTbDkFx1MDAxYWxhXHUwMDAz6NmhoUVcdTAwMTJKxMJcIokhwVxilkii2Fo20+fJKYnEU8uSSaJKXHUwMDEwrOfCc8ZMUrohWlx1MDAxOWVmsFx1MDAwNYpFM6G0YFx1MDAxY3EwL1wib/rIzFxm+3bfmsBcbskk5cl8IUZcdTAwMTBnVEtcIkCHI1x1MDAxMSOxiViafpzCM0mTquMzk0nTuDA/XHUwMDExRIw+4Fv0zzl7V1wiNsqUXHUwMDE1oylcdTAwMTGKRXzhmc5CSLdbZ/LwQa7hXHUwMDE3fI7bfXfjvtyqUJtdXHUwMDA1242QOFU4rO/VXFxjxmi6XHUwMDE5k8HxT1aFXHUwMDE0jWd1XHUwMDEz6ntcdTAwMTnB6j1cdTAwMWNcdTAwMTfVym+aUZFhRVgpm1lcdTAwMTYom011JIqqXHUwMDFlvlBcdTAwMGZh49K/urzc9mz8sv1cdTAwMTKsstPlJMD4RKV4UTE9nlg6iIlcdTAwMDTDXSCWXHUwMDFk2enrUU5kXHUwMDEzZVx1MDAwMSRcYqWMXHUwMDAxa0Til2/IVsUjXHUwMDFiXHUwMDAwaSltwlx1MDAxMIpcYlx1MDAxYz8lIabxxniwXHUwMDA0qa9svMm9SWN9k8zhPL92XHUwMDA3PFXphX7g/ur8
XHUwMDA3fLTdqvhB5ZRWan24Tfif8/orod9NdlbS+WrSWckjZOFhPkxUYsxcdTAwMWW4hFPEXHUwMDAx4dm9XHUwMDE4uumpvVx1MDAxYdu6WVx1MDAxMe4h78q77eZqyVx1MDAwMY6VJTk1fYQxxcGm61SwUWfOQjH75OJgyS1JI4b0SMuMqFx1MDAxYuSkZGi7vdnak4Bn0tRFYPU3cy/yubGMXHUwMDE4uWPPdpm/xi82fFx1MDAxNjavz9eP3fZJLVdkkSBJpIxivJDIYvIkXGJMOUFYMZ09slx1MDAxOP/U5Vx1MDAwNjZnXHUwMDE2gFtTziS4hGh8yJFS+DOAjUxjn1x1MDAwNmCSYVx1MDAxNjwmSlx1MDAxMZeIMyPKvrZlXHUwMDE2nFx1MDAwNNBGn1e04rjdlj8wglV6g56Zllx1MDAwN9TY8YOOXHUwMDE3/KjYnZ5Xa7k/KsDRgX3rXHUwMDA37Vx1MDAxZlx1MDAxNTesW4Ww+VxmXG6bZPNFXHUwMDA1L57hXHUwMDExTU4xcME4+OY5XHUwMDE0gafvXHUwMDFiXHUwMDFiJ359p1x1MDAxZe5cdTAwMWbs3FZcdTAwMWTeuNovuVwioMziRFwiXHUwMDEyOzFJM1NaS7XBXFzRw86E9VrfmdyAXHUwMDBiwiikuP4omJlSXHUwMDBlWlx1MDAxM8PzkeRreVhcdTAwMWVd/9zv6b2TXHKydvBzvXqx3jrc+blcdTAwMWN/l5mZb7hw1lx1MDAwNLQkdonAXHUwMDBlkUzSqF6ehZb0XHUwMDA1KSdamLSwXHUwMDA2etRcdTAwMWNzqVx1MDAwNJ9Ai7ZYpEukyFx1MDAwMUrIXHUwMDEyUivONdA0+L0xpWOxXHUwMDA1n1RLQPlcdTAwMTLgMT935t2qi3HnK9usXHUwMDE03OM6Q/FP8mKiUMVcdTAwMTd/4pTWdGZcIihcXOvsibnz08c62TpcdTAwMGU31WZ7Z+D8bp3dXd6UXHUwMDFiwlx1MDAxOJbA0mJcdTAwMTSOnsTwX4rxXHUwMDAwTGBcdTAwMGJLuoSytKVcdTAwMTPeJeqv69tB8+CkSu9XNlx1MDAwNs/VlfVsxaClITxcdTAwMWOZ9TaJXHUwMDE2uEa5XHUwMDA0IzEzWtJcdTAwMTekpGghyNKcgF9cdTAwMWVXtfmJjMeRhdKrNuNcdTAwMWHDKVJcYkWCrP/v+e5j8uybe1VcdTAwMTDhzVD8ifNwp6T6jHaH5GJNYFx1MDAwMKVcdTAwMDHI2Ys18blbV85cdTAwMDU6Od7fv+TVg3vpbvbLjeFcdTAwMTnzlShcdTAwMDVkXHQkZ/Y2Yy1xncyZfyVyJtlpbJnaMvKeflx1MDAxZHHau/2K4apilFx1MDAxNzLo4d8miPtcdTAwMTareGyfuqpD+/X7u6Pw2m5sgIJfXHUwMDEyO5tBWpG5hEVcclmSyd6oYKa0Qkaa4WfGblLXo5TAZpJbgCmJXHUwMDA1xqZcdTAwMDWRTFx1MDAwMFx1MDAxYihcdTAwMTP8cSn0jPTrXCLANtNjJGBVSsbAK42tNZuO4krEwcL+0s7D/Jt0MWKu2/WmSWlcdTAwMDau4/XmXHUwMDBlzs6g5Vx1MDAxOew0SctJMlx1MDAxNVx1MDAxZnelXCJcdTAwMTm7VFAzqCZH6cTu+mCv33hqPTjPm7/PJX7CwY1TbuzOIGVmwrKMI4XLT8pcdTAwMWNMqLEv9G9Snvtzt3j7cv9e3emHQfX8Tqyc9bvXS1wiZUrAXHUwMDExKbwmiorEklx0TpVcdTAwMTY4MndqdoQ4dTlKiWvGhMWAdCmhhFx1MDAwMzQmcc2BslGGkqhFcG18dlx1MDAwNVhFXHUwMDFjpNBmKmkmTlx1MDAxNrBFxDJcdTAwMDaYzk3JlGKBPs9XboO4dsOt1PomL1x1MDAxOdi1mlx1MDAxN1baXHUwMDBm
RZHzXGaWmiTn2dJcdTAwMTVO09ExK5OzXHUwMDExQZ1okqdyeTVcXKPy8njgXFxcdTAwMWV1Se1lr7ryuFby8TeCmvlLZtpMbOVyZs95ocnE2LQ54o+D0mJ6r0xcdCSnfFjRXHUwMDFlW/ykKKLYzGP9m6JcdTAwMTf43FnFT+Awar/R7Tw1j+qD1d7GwaZC7XxtlXTspIuiotoyOSBm8udE5UlcdTAwMDHFP3WpYa0wtoRcdTAwMDJcdTAwMGZUUPhPkylY8+JhjZk0jrNSSDNjXHUwMDBm4Fx1MDAxOL85JoMrMOhcdTAwMDL6tdVPuXfpYizd73i3nutcZmuCgVx1MDAwZosoa5pBTZPMnChR8cFsgVx1MDAxMpuJXHUwMDA0WNbETFx0zFxm3f1ms7mz5qygXHUwMDFlWf3ZXHUwMDFlMFxcvbZRuaErmMGmQPFcdTAwMDXJXHUwMDE4fFmLKqVokW4zRtJiXHUwMDE4fdAtiuslXHUwMDEy2npcdTAwMGaITVx1MDAxNC2/Q5lysCo0139cdTAwMDezXHUwMDE3/9xGrX12zPyNfTu4tIOfV+6Dd3+1rGC21uBGXHUwMDE1TcpcdTAwMDIl1mUoXGbbhOc5uTF9OcpcdGwuXHUwMDAx2Fx1MDAxOHBcdFx1MDAxNlxiV0iNXHUwMDAzm8FlsGLJzENAXHUwMDE2XHUwMDAxtqJmxqhcdTAwMDbcXG5pbpMtlM20ocRl5KTmZmTTf/yJg4FcdTAwMWGB3W1WnJop5nV9dje3w1xc88PQbyfS8lxmfpqk5Vx1MDAxNLGKXHUwMDBmaLPkgVx1MDAwN4Rcblx1MDAwMdubZsfvqXtcXHu8wnX74kBcdTAwMWY+N6u1weFlyU9elVx1MDAwMllSJtVcdTAwMTGb8LBcdTAwMDXWrimdTMfvQlxyXHUwMDA1poVbRFxc5ThiZsqK9Fx0ITRVUsXNcHz9t6u8+Of63vZJ/7T1++65eXDc2fDxplx1MDAxYSwrms1hXHUwMDFiiTwqby5WZiy5d5+D2lx1MDAwN0NcdTAwMTNlXHUwMDBmZ6evRylhrVx1MDAxMTWw1lx1MDAxOKhcdTAwMTdcdTAwMTFcdTAwMTZpm3qDtS5cdTAwMWXWUliSm2OEpFx1MDAwMPpcdTAwMTcqZi7+tKdMkFx1MDAwMGnNZMWv5OW8m3RBXvbCSuB2/VwiXFzkXHUwMDE5lDTFxVOiLMM3djy77XecOKTK5LpmXHUwMDA27EtcdTAwMDTKXHUwMDExqiY3x9Xn40O/22Ttjeb1XHUwMDAz2d1cZks+KkxKbCGiXHUwMDAxXCKmxJvpiVlhVFx1MDAxMctMoKFgiDCRXHUwMDAyVFx1MDAxN8iR4nljWsjiIELCxKHpXHRhXHUwMDE0IfhmRCHdOlx1MDAxOeZcZqVxV+32d2e109hsrj3uumtcdTAwMGZ7v1x1MDAwZmvVJUzlW4BqZ3Oi0ETxws+2XHUwMDAzVyyRXHUwMDEzNdNS6TylXHUwMDFi6etcXEqoXHUwMDAx11kmxKRcdTAwMTWliCk63kBcdTAwMDAg+Fx1MDAwNKhxXHUwMDA2SMNcdTAwMWEoXHUwMDExJDFZ07hcdTAwMDag6eZZxl+Hvn/p6XZCXHUwMDBisuhcdTAwMTHqOPPpdr16021cdTAwMTc1tnZcdTAwMDZRTLLipCzZOJHPy4mwXHUwMDA3XHUwMDEzq6w0R1x1MDAxYXOawymtrq1tefq3fOzJk1x1MDAxM3f7oHV7eVxc+vk0xFwijFx0JMzcuFx1MDAxOFLElplcdTAwMTDJNCqQXHUwMDE0zVx1MDAxMdSEy49cdTAwMWGqmFx1MDAwNO40NWLjolxuSYuZNLUgN9ohenxurbV/XHUwMDFmXFzs6v4lXHUwMDEy2+vHzSX5dcZOXHUwMDEzhXNYNIE2XHUwMDE5rZFcZoG3k4PC0pejnMBcdTAwMDCf
ilx1MDAwMlNccstcdTAwMGKRklx1MDAxM+ezwlxuXHUwMDE0XHUwMDBmXGZcdTAwMDUsSuBcdTAwMWXIXHUwMDFjz0pIrFs3jVx1MDAwYlxmtMtcdTAwMTFdRinD/FxmZvboom5ddlx1MDAwNmu4wDd26Fx1MDAwN0VcdTAwMWSVPkOzT/l2MfJcdTAwMTTNZFEra6pcdTAwMTGPK8VzlVwind2R3sb+xu9DZ4M01n5W9fpcdTAwMTEvOWJcdTAwMTVXXHUwMDE2NlxycDiWysDktlxikUJcdTAwMTeJWFwiNFi2MnrQXFyUwyz+Pl1xXHUwMDE4fFx1MDAxZH1cdTAwMWRcdTAwMWbFXHUwMDBi2oxnk8VkPOfmtFllPcxzf1x1MDAwZX5fP9Sv2NrT9UX3/Lh+dpSP07CEb6ZoTlx1MDAwM/tcInGWXHSsvFx1MDAxNoLmaIeJf+pSQ0RT2ILAWuBcdTAwMDWDMpBy0i9cdTAwMTPMolJcdTAwMTRcdTAwMWNcdTAwMDIhpm6fKsWITFx1MDAxYWpcdTAwMTTDalRcdTAwMTMt4LZf6pfl3aWLsdqw29dM/bFD2KhFUdtcZk0/SW2JQlx1MDAxNcxv0dOcpuiNgX2keI6SvLXt6vr1cVNXty7lelx1MDAwM9uD0z4u+bh0wKVljFFweuLCl4wpS5owh1x1MDAxOUtelvAlwFx1MDAxY2tEcCGpwlx1MDAwNV00dtTv7VxmVp/O11x1MDAwZntcdTAwMDHG9vXu4HRZXHUwMDA1MVxca0XzTD+bi844Tlx1MDAxZa4rwEnjSuRgs9TlKCVcIlx1MDAxNFx1MDAxMqbzinBCqClcdTAwMWWYaO5k4lx1MDAxM1x1MDAxMMHxsLnTVL9LXHUwMDA1bFx1MDAxNjd0ISbIqFxynWn1lVOGcu/Rxcis3nTr90Vx2FxmdT7V1DkhS8HUXHUwMDE1nY831clcdN6IUFGzZuZAoSOmxONg/0bxXHUwMDE23+tsdvrHXHUwMDFkuVSgOnav6S63dI1cYstMhk+IMlx1MDAwMlx1MDAxNCwuqChV6o1JQsCVK6Zpc0HuoquPd3utm6C7y8OrzYP6tr7fbWfirlx1MDAxZmlcdTAwMWZbcOpNYpwnqTFcdTAwMTcnRvMmU/P3NMNcXOSZR52+zqUkRWFcIlx1MDAxMJRzM44oJvVG8SdAbb7UXHUwMDFiM4N2XHUwMDE1+tJyXHUwMDE0Ialmi7ZXliTzNoMoPiPzlnj4lam2SMKp6dBAXHUwMDFj5zhcdTAwMWUyXW+Vk1x1MDAxMlx1MDAxNUKW0DixTUNcdTAwMTMwXlx1MDAwNZtZNrZcYk5cdTAwMTm3tFx1MDAwMdywXG5cdTAwMGJjXHUwMDE5cy5cdTAwMDR4lYwnXHUwMDFjgqVAasK+rDYlrf9cbixccjZX+iHjXHUwMDE5WOlZ7crYmVLmyF5KhkO5NXhkPHpw1OuZUsKCL4GbrFx0IUKIUZi+srRDsNKVwbjAZuaGlKbnXHUwMDBlS1x1MDAwMVtzSl7wq7RcdTAwMDRccj9cdTAwMWNcdTAwMWNcdTAwMDdPhnPJO8dRWGNXP/NcdTAwMWOslUSIvF5cdTAwMWShY/RJ36J/5taMhOBEXHUwMDBihiBGXHUwMDE1k3mOzEkviC6lXHUwMDA1XHUwMDAz+LXAPmGIvtbJT1gwZswh0Vx1MDAxOea+LHQuIDHamVxik3lcdTAwMDUjhsVcdTAwMTgwmFx1MDAxMUvzt7bzaeeBMUzBxy6fblSIiPnO1MmqXHUwMDFinbvto0168XP7qVx1MDAxZOzWXna8h3ovTtWY4/ZAXHUwMDE0iczUXHUwMDE1XGbmqI4qkrfj9qiFJpy25SnF9Fn940pcdTAwMTFcdTAwMDPqKFFcdTAwMTRcdTAwMThPmcEk01pcdTAwMWNcdTAwMTOLvo2iLfxswC9TiInAMD8rY5jIqVx1
MDAxMWdcZmVGidltjYQpYs+uXHUwMDEzb8VhqyZcdTAwMWa8Q1x1MDAxNax5XHUwMDFiXHUwMDE3br+7XHUwMDExXHUwMDFjlFsnYqyZhcEoZibaKKZPXGLEyGLD2Y9SkFwiXHUwMDBmOpbYXHUwMDAy59GMu3pNY8eUpLApbahcdTAwMTWghi6jI3+uhqG0XHUwMDAzxlx1MDAxNFx1MDAwNb2TQ1x1MDAxOaZu4eQhyYnjJDinQsC3SjPv3uOjNbsuNlx1MDAxYpf6Qlx1MDAxZjflXW1wtnZZcmdcdTAwMDd2XHUwMDBisVx1MDAwNJJcbtyZuORcdTAwMTVcdTAwMTjFXHUwMDE2MmeJzVxu1S/C6cpcdTAwMWOiqWAvxqac6fSuxWD8qqXU3s9cdTAwMTmPXHUwMDEwnFx1MDAxOFJcXPhcdTAwMTidzIdz77lccjdcdTAwMWHDXHUwMDFli0e03Nvx3Z25NWZM3sm4w+Q9M8VcdTAwMWSImFx1MDAwYogkeVpcdTAwMTNHZqpcIpHZgYhEwzn+SY/5Q/X6iTz0tX9ElluXX1x1MDAwNFx1MDAxMKUpUIJcdTAwMDeNz1wiXHUwMDBmJ/8viUeSI/FYWpIkXHUwMDA04tlUXFxcdTAwMTBcdTAwMWNNLMBs/bqBLkvCYfa44Psw8MpcdTAwMDeOfnX84O2EqM+F57QolVx1MDAxOEkygZamXHUwMDA3XHUwMDBiZ1x1MDAxYzRJkpPdSnGMpMyRRENbp8HlnX5+vHFudlYvnbP12lnnL25cdTAwMDKCacZcdTAwMDRYZXRcdTAwMDZ0M8x/WL5cdIhB1yhcdTAwMDGo/6pzp77cXGJMa1xuo5hSYkZqZN7Aq1xydCVPuzd711x1MDAxN/ZWVdzt/l7dWe4s7i8gXHUwMDFmXHRcdTAwMTbaknZwSvkht4hcdTAwMTYqzlx1MDAwNpzmXHUwMDFlJVx1MDAxOUI6Opjii7gnT7/HkrinXHUwMDA2XCK2XFznVydV838qXHUwMDA3vYn01VxcXHUwMDA0cE2CskRagzeXo2ns+eLZv7lcdTAwMTe3j+u1zkNHXHUwMDFjdevX+62/Nlx1MDAxNVx1MDAxMUosZoJDmKRcdTAwMDM5w8yDXHUwMDAyqFxifEhm4nj/tvFcYi6Tj/RcdTAwMTSUXCLwXHUwMDAwVHYqulx1MDAxOez3boJcdTAwMTOPoaub1nqbXHUwMDA2R6fPy21cdTAwMTX5fCpcIpRZekk7OIWKcFx1MDAxZT+ImUFrnNCvq4/4dC6igVNcdTAwMDHsh4NcdTAwMTI4QjGyXHUwMDE0xT6JxU0qpcXLXHUwMDFj1iG0XHUwMDE4XHUwMDE5mbNwe6D0oHXsbzclulrdYs1DpMLljtD6fNxSZIrkXHUwMDAxUmJcdTAwMDZuXHUwMDE362AhVjbzXHUwMDExa5PBVGRcdTAwMTmzsv4qmO333GClXHUwMDFi+I+eM7TUXHUwMDFj91cn9Cszjlx1MDAxNy1cYrTTwlRiZVlcdTAwMDZqUyqASfJMWkVcYjezWDPDluuD+5u7tW5r83lFVzd7jd72/knZ7UWlLVx1MDAwZdhcdTAwMDXrXHUwMDE4duN0sVx1MDAxM0KWOalcdTAwMTfjXHUwMDAyi1x1MDAxMlx1MDAxNbKEXHUwMDE5Nvt+TMtoQ0dcdTAwMDBsMVx1MDAxMZPNN+ZcdTAwMDD8rlwi5St1+iSLUSWnXzHiUjPGI9w0a1x1MDAwYq883lx1MDAxY5/csNb54cnOxtXdyZrn2MstSvlcdTAwMDLmMdNcdTAwMWSJkFx1MDAwMGecyjxCMuzo+WvYqVwi8e3FMfTDiVx1MDAxMkJcbv5vRD9DXHUwMDFk/0dcdTAwMDX2kNv71XlrxFx1MDAwN91fXHUwMDFifFx1MDAxMVx1MDAwNUVcdTAwMDWqzJBnXHUwMDE5NOS2Wl63XHUw
MDE3XHUwMDFmuEhpRFGaKi7yzKVqbfv4p4OuV/HzPutd2c+PjWrJZ6NjrLVFKGdEmnqjmFx1MDAxOazY0nBVzFwiokVAXGb3wJrTWFxmU1x1MDAxME9PhCffQ5HAQIgu5fShcnNQylx1MDAwNmYkpVxuXGJzRGmOMqD67Ynf23p5fOg6R4PG9tX10fauW+79XHUwMDBi7oQlYP/i+Fx1MDAxMYZATdk6XHUwMDFlv2D7XHUwMDAy2CjskmV0dyx9+1xuqUmeavG5TCiMXCIzhCadXHUwMDAwhTVYwTq7XHS1/fts7+TlufNzXTz598+76/tH/nJcdTAwMGZKL8CEQoRbiIFDXHUwMDFjP6GFXGJhUamQ0lx1MDAwMszzgrYvx1x1MDAxNifZx06YXHRjWqtldLD/VSwoXHUwMDFiXGafW7tcdTAwMWUmNetcdTAwMTZkJ8XcNldcdTAwMGLSt7ea1e92t3tcdTAwMWHCXG5+aFxu+Mo8Z2JU8PdHz31aS943395cdTAwMDD+fWi3mY/689uf/1x1MDAwN3ZcdTAwMThPXHUwMDFmIn0= - unified storageobject store (local or S3 bucket)git repo(github, gitlab, etc.)Other SoTsState / AssuranceDataInfra MgmtToolsGraph DB(neo4j)infrahub-syncgeneratorstransformationsschemachecksartifacts3rd party containeror systemuser-providedcode to infrahubcode-generatedoutput from infrahubinfrahub open-sourcecontainer or systemLegendinfrahub-bundledcontainer or systemmessage bus(rabbit-mq)cache(redis)infrahubdeployment systems(ansible, nornir, terraform, etc)nornir-infrahubinfrahub-ansibleschemainfrahub-sdkGraphQLAPIinfrahubctl \ No newline at end of file + infrahub-sdkinfrahubctlInfra MgmtToolsinfrahubOther SoTState/AssuranceDataInfrahubSyncobject store(local or S3 bucket)deployment systems(nornir, ansible, terraform, etc.)nornir-infrahubinfrahub-ansiblecache(redis)message bus(rabbit mq)unified storagegraph db(neo4j)git reposchemageneratorstransformationschecksschemaLegendinfrahub containeror systeminfrahub bundledcontainer or system3rd party containeror systemuser-provided codeto infrahubcode / filesgenerated by infrahubartifacts \ No newline at end of file diff --git a/docs/docs/media/release_notes/infrahub_1_0/1_0_ui.png b/docs/docs/media/release_notes/infrahub_1_0/1_0_ui.png new file mode 100644 index 0000000000..9cd91c9843 Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_1_0/1_0_ui.png differ diff --git a/docs/docs/media/release_notes/infrahub_1_0/permissions_1_0.excalidraw.svg 
b/docs/docs/media/release_notes/infrahub_1_0/permissions_1_0.excalidraw.svg new file mode 100644 index 0000000000..a7effa552b --- /dev/null +++ b/docs/docs/media/release_notes/infrahub_1_0/permissions_1_0.excalidraw.svg @@ -0,0 +1,21 @@ + + + eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO1daXPbTI7+nl+Ryn6dcNBccvQ1VVtbtnzf8W3vTrl00LZkXdbha+r974tWXHUwMDBlUVx1MDAxNGlJNpVQ9VpO4liUpWY3XHUwMDFlPFx1MDAwMFx1MDAxYUD/59Pnz196z+3wy78+f1x0n8rFerXSKT5++Yd//iHsdKutJl+Sg5+7rX6nPHjlba/X7v7rn/9sXHUwMDE0O3dhr10vlsPgodrtXHUwMDE3691ev1JtXHUwMDA15Vbjn9Ve2Oj+j/93r9hcYv+73WpUep1g+CFfw0q11+p8/6ywXHUwMDFlNsJmr8vv/r/88+fP/1x1MDAxOfxcdTAwMWJcdTAwMTldJyz3is2bejj4hcGl4Vx1MDAwMFx1MDAwNZCIP73Xalx1MDAwZUYrNGhjSejhK6rdXHUwMDE1/sBeWOHL1zzocHjFP/VluVej9m3nas+s11x1MDAxZVx1MDAxZk9PlO7eXHUwMDFmXHUwMDBlP/e6Wq9cdTAwMWb1nuuDcXVbfDvDa91ep3VcdTAwMTeeVSu9259cdTAwMTNcdTAwMTd5Pu23Oq3+zW0z7Pr7XHUwMDFmXHUwMDBls9Uulqu958H9wa9nv0/Cvz5cdTAwMGafeeKfyLhAXHUwMDAzXHUwMDE4XHUwMDAwjfxcdTAwMDXDd/G/j4RcdTAwMDGisEpYY5FcdTAwMDSZ2MBcbq06L1x1MDAwNVx1MDAwZuy/5LVcdTAwMGKJhkMrXHUwMDE1y3c3PL5mZfia68Fj+JrHXHUwMDFmtytcdTAwMTBcdTAwMDJphVS/rtyG1ZvbXHUwMDFlX1I2ME46bY1cdTAwMDL/XHUwMDE4jq9cdTAwMWJcdTAwMGXWgZwlJSiyjv5T25uVgUz8ezj5XHUwMDFklqZN/1x1MDAxYs1+vVx1MDAxZZ3BZuXHXGb+lJ2h9OCPZ/5cdTAwMWHeln/9alxc6qKSN1wifb3wqffrfiOSst7ZuSv3KiuXpY1y6WT3Zu2oe1X58ut1f/3433D4/Xal+F3whEHgLymlcPrX9Xq1eVx1MDAxN7+3eqt8N5TVT5FcdTAwMWJcdTAwMTlDycg4I1x1MDAwMHFcdTAwMTLT8EFCoFx1MDAwNCvU1PBIvulcXMNDgVxuQCskXHI831x1MDAxMJmNXHUwMDAxPJRcbqS0iFoqnFx1MDAxZjxcdTAwMTiiqIRTjj9LXHSl5ThMUMVxISxZI7V178fFyIUxXHUwMDAwZCmjw1G1mr2j6otfXHUwMDEwaUeeXSs2qvXnkVx1MDAwNVx1MDAxZFxi70C++Fx1MDAwNr+MPL1Ur954Of5S5uGGnVx1MDAxMVx1MDAxMe9VmVJ+vaBRrVSiJFHmzypWm2Fnc1x1MDAxYdXe6lRvqs1i/Th5KHzn4cbPtVx1MDAxMkFE0ZWK3dBf9TdKr0L0dVwiXHUwMDEzRsaf/kVkaMGwXHUwMDAwwfRILXVt84CeNnbXN3FZXrnnjbPtjXwjlSxcdTAwMDSCmEmMMVx1MDAwMlxcRO7971x1MDAxM/NcYsNcdTAwMTiZxYzzZJaOVFx1MDAxMfqv15FakteyVEogMmlcdTAwMDOrlTbIaFx1MDAxNaxcdTAwMTKGwv9cdTAwMGKqRlx1MDAwNShJ8iDGMEtSglx1MDAxMVxuh
4NbXHUwMDA0Lqvs32yfa71ll69vj4/1TulAdZ5zyGVcdTAwMDLwXHUwMDE1Y09cdTAwMThFQpqpIZJ817mGXGJzR+CsXHUwMDE2ilx1MDAxY6FCbUchYlh2+Zo1fibeXHKRNDJTJuCpdsZpQVJcdTAwMTkrxlx1MDAxMTJOZlpqieBcdTAwMWO+XHUwMDFmXHUwMDE4i0Jmh62ops+UyyZo9ziXjY7kd1BZOkxcdTAwMWRcdTAwMDJcdTAwMDE6nFx1MDAxZafqfPNkXHUwMDEzi0f3NfVolnbam8tLzetcdTAwMTSc3lx1MDAxNsu3/U7455FKXHUwMDE2XHUwMDAzsNo5cEjOs9ZcYlQlXHUwMDEyc1x1MDAxZDOZIdTKREzwNyC11yk2u+1ih4UqidBMoKV1oCU5/6BcdTAwMDRcdTAwMTeNPUjURvx00YZj/YVevoIkM0Dvb6S1k8227i5cdTAwMTd2O+L6/uxot/d0u7Qsc0lrMt30Y9+FjFTKTY2X5NvOPa+xXHUwMDA3xjZcdTAwMWVcdTAwMGKZ0ESjYCFcdTAwMWSQluw3WSHeXHUwMDBillRa0zJQ7KNcdTAwMTmGpVKko8bda06aVE4zsdHfiNhOulHuypTYJuj6OLGNjmT+xEb4io9GrDmdmoHYepv793dnhVOSK9pcdTAwMTb11X1xWdTyXHJUNuJcdTAwMDKQ7Fx1MDAxZFx1MDAxOcmKSUWm+GewkVx0j7HK6OGvV+xPxllZvlxyqNJHXHUwMDE0tfQhRVx1MDAwMEJ4U9DRI4Y9tVxioFx1MDAxN4HRdttrhZ3G1fZB5Vxm3IFdxc5BweWR0Vgjplx1MDAwM8VJa6XR01x1MDAwMyX5tnNcclx1MDAxNCNkYJBlTDknXHUwMDE1uFFPzYdcdTAwMWQtX/LMXHUwMDBlc1x1MDAwM8rboo7SXHUwMDEwXHQhPqKOmVx1MDAxMNpcdTAwMDRcdTAwMWT/x6OOmFx1MDAxZVGRhqWG3Ybpg45cdTAwMGbFjZ3b0l2jcnF5fP10V7s7abTa+capZsuTbUpCUqCdXHUwMDE4XG7d95ijXHUwMDBmdUi2P5FIXHUwMDExRC6/xfIsqopN3jxTXHUwMDAxXHUwMDFhRipZdsM0JuDU6GA82Mj8ZkgzVlx1MDAxN4rDjlx1MDAxYnedyn1bdS/XLndP7vTJ8s3RToTD/pH8tj9idndcdTAwMGbbJ8c7u0/rz4W9rytcdTAwMWLP+5WHs9FP+fn5xU6n9ZhHblx1MDAxNDaVXHUwMDFiXHJaQ+ysT1x1MDAwZrnk2cw35Fx1MDAxNLEmM8CeXHUwMDFlXG4tXCJRyp9BTGBezFxic5lcdTAwMDYxXHUwMDFkOXZD3d+JXHUwMDFi51x1MDAxOMScwFx1MDAxNn88iGl1/OmfMLXOx7LVXGbbcYgnXHUwMDE0XrVLXHUwMDA3y7R7b+5cdTAwMGaWz/C8mG+YWkeB88FcdTAwMTih2Hi0MLpcdTAwMWTHfFx1MDAxOZDWftNcdTAwMDUmbJxfgy1cdTAwMDO8Na9EXHUwMDA2YJVD8z14mVx1MDAxOL2c5OsxyJ1hXl0omjy9e+o5fXhTuO/unj5cdTAwMWScPlZKzcs80lx1MDAxOURcdTAwMWO5XHUwMDE4TtjzUNZE9eUknCTfda5xwlx1MDAxOFx02KHVXGIsgSgxln/FRINcdTAwMWFcdTAwMDVMzr96XHUwMDBmTt7k6jl2Q4Vx9o+HLs1cZkKaX1dvgo7/466eTLc7UTo2uKKpRlx1MDAxMzPBVKu2Xl7fXFxWx7fN6spRL6ztVvNcclQtZUDWOf/H25/xXHUwMDFkOVxmpGBcdTAwMWOTZK5BgTY2sGzszky244TP9rRCwWJR2rf9sHJy/LBz2jhdUeHeXHUwMDFk9l5e9mentFm0xdsoTVB69JKVPFx1MDAxYlx1MDAxYmpcdTAwMDZOS77tfENFq
Vx1MDAwMKQlxVx1MDAwZan3kUajl5JtP4VGStQsw3ODyts25NgsJzDa/HFcdTAwMWbt95HaXHUwMDFj9+MmqPk/vVx1MDAxZodcInU/XHUwMDBlWWDYXHUwMDE4ijj2k3Cqv8GualY2Lm63ji56rdrFc6ma8+il8b6PXHUwMDE1gm045jRnYjhl58mCNWBB5p3SpN/MU1wiXHUwMDBiW/Q3MtrOSq1wXa/0X1x1MDAxZc6ezpw41Fx1MDAwZjvqNJeMlp5cXCx9uu1MiZPJd51voDhcdTAwMTEge8NaWVBKUsz2Y0LjXHRcdTAwMDCf9Zs7QpOgeIW0c1x1MDAxOYT5P1x1MDAxOG2Slv/TjCZdajTFkdJupMBrXHUwMDEyUOsvvX5ono626aZ2+SS3Tlx1MDAwYpVWzp00NjdcdTAwMDNccopcdTAwMDSjJInRKFx1MDAwMIdcdTAwMGLhpKFcdTAwMDS+XWlcdTAwMTerXHUwMDE0YGv1aWl/7dtcdTAwMDNumcJ563Cti4fiKpeMplRcdTAwMWFQlFBcdTAwMGUs4vTh+eS7zjVQXHUwMDFj+mpcdTAwMTm07KSpJEbTXHUwMDAxc3o+XTRSjGwls1xiyH9cdTAwMTDaJCU/f0L7vtGeiNFI2lM8jlwiXHUwMDE4okpHU58mgfTs7uJaq6W9q4JYbX89K5ysVSjvxackXHUwMDA2kVx1MDAxMnQkhVx1MDAwNFx1MDAxN8trtn6rm8lMK+mrepBiXHUwMDAzy6b41GtcdTAwMDIj+Y+14IvaXHUwMDEy9lx1MDAwNjRcdTAwMDbGoWFtYnztW2TL/We2JFx1MDAwMmvULDa+x7lMTuayuVx1MDAwMLnbK3Z6y9Vmpdq8if9K2KykXFypXHUwMDE3u71Cq9Go9nhcdTAwMThcdTAwMDetarNcdTAwMTd/xeB9lzwobsPi2Fxc8DtHr8XR0/bvOMrfw/99XHUwMDFlitfgh1////c/XHUwMDEyX+3zLNDvXGJcdTAwMTFcdTAwMDKxLWOjv40qXHUwMDE4XHUwMDE0n6BUrL/JKT3p7VJcdTAwMDVpcDEmQsM3+1x1MDAxNP3+XHUwMDA2JaLTXVfh038lqulN4sd253y3v9PYW281L+6X1rfra6eYbyXCk1x1MDAxObBcdTAwMGaIxmtcYmbOmFx1MDAwZaHAXG6wvLzKZ4+pV9Jl3pNzXHUwMDFkaGO1RVwiXHUwMDEwJFEm6Vx1MDAxMFx1MDAxYlxmTFx1MDAxMdZmjq14NVx1MDAxZeJcdTAwMDFUbNkrMZdcdTAwMTjPh1x1MDAxNpmXXHUwMDE2SV17/1x1MDAxOFv1rHBvIdW+XHUwMDE3isgnpMzQ16XRqtk6vcgte/fyVD5+OO8/7dzlXHUwMDFi9laKXHUwMDAwtWDVTEL7XGJuXGb3MiBCZ5Enwkas/yxRXHUwMDBmwSBcdTAwMDdYXHSByoDBiFx1MDAxNzHMTIWA9Y5jXHUwMDBix/fQkGrMdnBiYFx1MDAwMc2nJP5cdTAwMDP180J9+tr7x9iqZ1x1MDAwNfvoxupcdTAwMTjbM9lcdTAwMWIjZihcdTAwMWN56G+tt16ueo0t0Fx1MDAwNWdPr85L7d18w55ccquAQDskJUG60Vx1MDAwMitpdeBcZs+671xm40w6178nl1xi2NpwXHUwMDE22NpwXqs7MFx0sGe974tSmVx1MDAxMIyQPlE3XHUwMDBle6+2nNZmLsnpXHUwMDFmsJ9cdTAwMWbsU1x1MDAxN98/xpY9M7pcdTAwMTep2bZcbqQvSpqhXHUwMDBlpbN8uW/2e7Xr8lx1MDAwMW223UV9+3ytm2/YK2NcdTAwMDIljXIk5CCiXHUwMDE2w71cboQwSrOhj9qQSI9cdTAwMTS8h++1XHR4zY0yliz55U+ke/T7hchOh3Q2kjH2KzdJO2FtdD/xXHUwMDAz+Fx1MDAwYlx1MDAwM
PzUtfePsVXPXGb2Mj1CqJ0ky1x1MDAwMj893W9udeDrVVx1MDAwYvb1s7zdR+y6Z7uab9wzVtjK14ZVKoGLhGiG3r1lT1x1MDAwMDRcdTAwMDFb0faV1lx1MDAwN+/AvcJAWUHGR1wiQWmbZOaLQFvhQPB1LfU43zPgtTNcdTAwMTbEXFwqqj9wPy/cf01dfP9cdTAwMThb9qyAL0xkXHUwMDFme6zniZTs4msz9Gon9m39tlx0l/XLdkcvN3dezo7C0no95ykpimRgjN9mtk77VrUjyEenXHUwMDAzMsKhNKDQmXTgT7M1kFp5XHUwMDFhsFx1MDAxOce2niFyZEDCXHUwMDEwJFFL3yjWQH4nUehcdTAwMDRcdTAwMDefXHJcdTAwMTY2XHUwMDA1UdpcdTAwMGZTf7GQn776/jG27plB375cdTAwMDJ9so6HY2aI6Fx1MDAxZm3WXHUwMDBi2/tbstHqX2zfXaxvPi+FrXxDX1vWucTkrn1cdTAwMTWsjFXAomMnXGadNd5cdTAwMTCTyuL7SD9cdTAwMWT7zNXOXHSJIFx1MDAwNIFNqjqnQFx0XHUwMDAx1lx1MDAxN1xugc+3XHUwMDE5gz7fhSZrP6C/cNBPWXz/XHUwMDE4W/ZcdTAwMTmRPyHDzaSa/Fx1MDAxNr2Lodz0vH/drlR2e83G/dP+Sa+hbl521rdz3lx1MDAxOYb5PrA+jY9tLjaCaDTC51tOMOZ9oNNJq9+dt1x1MDAxM5adS8A+ylx1MDAwMFP6tb/S3VaCRamyydz5jTlt5rlycnnWuzl8+Vq/fGlcdTAwMWZ2a8tcdTAwMGVymdOG6TltTFx1MDAxNCpaXFw6XHRcdTAwMTnJN51rZDhcdTAwMWMkrYE/ssFZq2LNWIxcdEhcdTAwMDKbK8LvR84po+1NfSFcdTAwMThcdTAwMTTAPrz6XHUwMDFiZWjPsS/EXHUwMDA0nT7/vlx1MDAxMKmWq3ap+HTGILtBYvpg1XLYMLR62S923cbqTlx06b66eptzgGpcdTAwMTO4QVx1MDAxMTt/MXvESt2dXHUwMDBi2KRl835QySPSXHUwMDAxOt3uVDJ1QcDIXHUwMDA0yVrCXHRcdTAwMTBOyoRcdTAwMDbtWlx1MDAwNbxU1oFhx5qt00jN/Y89aa1cZrj5NE76sFrnZrWmr71/jK16pmYrvlJcdTAwMTNslTFcZn2a3m69LPXvb9bE8WZRmdPGkipcdTAwMTZ3njbzXHJ+o/0mgC++YLpDgaNOq7dbvTVrPW9af6TKO+m57MqJhitcdTAwMGbCeb51wjn2nlx1MDAxM+zXxFZp4DtdsFZarH6f8LTbPul8vd+W+mSvt+Mu+zxcdTAwMDF5tFxcZXq2XHUwMDE2SadcdTAwMDDZoptcdTAwMWFcdTAwMWPJd51vcDhcdTAwMWSwyVxufrKtiEHD2EBcdTAwMTmdXHUwMDFkNDK0XFyZyoFcdN1mcfjWh+k6Sa3/4ZZmPp8oXHKlhlx1MDAwMFx1MDAxMVx1MDAwMWawX1vmeHetUmtcdTAwMWaftjvll+fj5tdmId8oJUuB0EKA8YHTaJeP51x1MDAwMYBcdTAwMDb58o5JgrR7Leo6XHUwMDA1SkNcdTAwMTeWw1x1MDAwNJRatqA1+MbVWlx1MDAxM7NYgvWqhoP+6V/y2lx1MDAxOGVhsUpcdJew2K61XHUwMDBmto528WFvufi0WVjZXpq20edz4+CaXHUwMDBlsX9XO1uqXHUwMDE07dpV92DzdPRT3tnoc/6kyEZqXHUwMDFh3JDFzJGS05Ni8mzmXHUwMDFjbo5ccnZUaEE7ZkerY3hz2eEttTGaXHRAKbZajVx1MDAwNKa6pOInXHUwMDFhw5vwiVZcdTAwMDKiu65cdTAwMGLKipGsukmd0eqtUrH++f+aXHUwMDA3YadR7Vx1MDAwZWR4PiQ5gTjG+qR9XHUwMDFmWNKwpmJMf
Fx1MDAwZmOqSNp9fLOC0Fx1MDAxODNcdTAwMGJjXHUwMDE2+kb3XHUwMDFmzi9cdTAwMGWXtixsmFBcXJ01XHUwMDBl8lx1MDAwZWFcZlx1MDAxNEqBmj1cXClxtMpYk1xmhG88I40jXHUwMDEw0c6bb6HMYqVSMYmUaaVmZWr8mVx1MDAwZiSmokzpXHUwMDAwhJXzST+e34lFtceL3eVVKvdcdTAwMGLy5Fx1MDAwNFe7fbl1nUNqk2hST5U1lt097WagtuS7zjkubOCEP4XEXHUwMDE45Vx1MDAxOS5cdTAwMGVcZpMhMDLmNuNHZFx1MDAxN79B2vTctl+qsZb/XHUwMDFk3DZBxce57cfA5sVtr2TfRWLiY2U2goyYIVx1MDAwMef1nvy5hK+TOrCMYc02KFxiXHUwMDE3q7JBR1x1MDAwMVx0x85cdTAwMTk5n6GTXHUwMDFlr5lmIyMt/8bHKVx1MDAxOVx1MDAwNVJp6/tcdTAwMDOrpINmReBI+t5cdTAwMDFcdTAwMGW0XHUwMDE52fL8mWxPVls2rz9Kalx1MDAxN2snI3Xx/WNs2Ydv9yn6/W0mLVJq/o3xXHUwMDE1fVx1MDAxON3Xm0jdl25VPOpObemob3lldPN8v5lv7CunfXqTkb6MxsSxr3hlyNcxXHUwMDFiXHUwMDBmfmteKbT53VEgYclcdTAwMTCPXHUwMDFlXHUwMDE2y6bdWC+sNK0obe9XrjbgunxcdTAwMTVcdTAwMWXsrU1cdTAwMWJcdTAwMDZqPq+HN4VDXXz8erBTg7a054ePo5+S9zBcdTAwMTB/SmrUVfsoJMyQ1ZM8mbnGXHUwMDFiy2vg3TZlpTXa4GhcdTAwMWS7slwiQ7xlaymjctafXHT1N7KUf19cdTAwMTRoXHUwMDAyc+QqXG5cdTAwMTTNb47v/TOFXHUwMDAzOTlD4s9NSLXy+lV3zZXvnzdLV7ftrduc91x1MDAxOVa+KyNcdTAwMDGRXHUwMDAz61x1MDAxNMVcdTAwMTJ/NJE/XHUwMDBixkqhhSaboyhcdTAwMTA76NqfizGXvlVzY8xcdTAwMWIoXHUwMDBielRcdTAwMTeXpW5v+2HTLG0rzGNcdTAwMGZGadJcdTAwMGZcdTAwMGa0bKn4XGLI9CkxyXeda1ho8KfhMq+wXHTHlrrUMVi47GCRKbNZJLY81OJcdTAwMWb8ksdcdTAwMTDQXHUwMDA09f6bQ0BcdTAwMTN8QZ2e1CZ8XHUwMDE2tpylyZI4LZzaneuT6jb1N/v93dX+TaWcb1x1MDAwNFx1MDAxYnCBdb6NXHUwMDFjSYtKjmbuqEHKuWRcdTAwMDfdsTeOkX5cYjlwXHUwMDA2NVx1MDAxYX9cXM1iJbSVqmcnq6trW7tcdTAwMWR47jds7666XHUwMDA1+38rZ1Clm5K+3or4a/pGJ8nTmW/EXHRcdTAwMTUgoW93XG5CSlx1MDAxZMtcdPBtzTJDXFy27iCxLWktZVx1MDAxMX5ZXHUwMDE01vx97uBcdTAwMDTuyJU76GQ6hlx0lSUzQ7Lr4fmLva71NlxuxXO9ffZAYVs+5Lx82YBccixcYs1cdTAwMDBms1e7uDcoXHUwMDAyxfYuOnbXpIX4uP6gO+hYvYCJNi1aXGLObJftavdy+/T+8Vt346xfflkp1fLIbcak41x1MDAwMlx1MDAxNGglXHUwMDExpvdcdTAwMDeTbzvfwFx1MDAxMFx1MDAxNFxia1x1MDAwNDt8Tlx1MDAxOOfi/qDOXHUwMDEwXHUwMDE4mXKblMYhW1x1MDAxZlx1MDAxZlx1MDAwZeHwRZlR21x1MDAwNFxynyuH0KbvVVxi0EhWO5rePIXVdvH56aDSPLkvbsHafe2o9i3niT1WqkD6RkfaN9eFWD+uXHUwMDAxt0lf/aitMyDfV8mRKbdcdFx1MDAxZoNCq+bTd29+tfk3d7dbJy9bXHUwMDA3p
7JcdTAwMTL2i3T50rt+wzHX8ye36ImSY1x1MDAxOTNsXHUwMDEyXHUwMDAyzEJuybedb2SgL7+UPOG+U1x1MDAxZKp4xpvOXHUwMDEwXHUwMDE52UY7XHUwMDE5QiPFmVx1MDAxZtyWXHUwMDE5t03Q8LniNpfeYlaD71+G0+O3XtlTXHUwMDA1acVBdenuyZZ3Ko/9m2LO8cvMxr6PMsI5cFxuh5/8PdRpXHUwMDAzf/wmWq19osn7umtkm8ptfX6t04tFbL3wXHUwMDE5cNuuXHUwMDFhvD3cXX/Y29ktXHUwMDFmtqaNdHbgvFRcdTAwMTNl/dS9OG2e6Zu1jfO95Vx1MDAwNYt0XHUwMDFhnVxuN2vYsDJGTVx1MDAxZiVJns1841xyRcC0XHUwMDAzilx1MDAxZEHfOCm2teBcdTAwMGIrMsNbtoFO4ds88bj/RtVPv48xJzBHrlx1MDAxOJO9iVRvUJNTLNoz5IperH/bck+l65XL1ZV27+z02Vx1MDAxZZRzfvRcdTAwMTIjJ3BorFx1MDAwNvBcdTAwMDXDXHUwMDEw50xcdTAwMTM4IXgqJKFcdTAwMTHwzorhTDlcdTAwMTNcdTAwMWOxUtFysTq1VW42S6V6vdBqnDz1WzvXnbWVZnta0nxovtTCzsFd52Gf7o5s9faxtFtYNNKEdMR5L1x1MDAwNlxiZuiHnjyd+UZcdTAwMWNRgFx1MDAxMpE0OCtcdOKsKTNEXFy2IVRcdTAwMDDt/3zUVUVelFx1MDAxOWtO4I58saZIra1cIkFucI7a1Fx1MDAxMP623928Pnva2ulsnZTXOqd6K1Q5P8qEXHQz8OeWXHSjWFx1MDAxY0GOkqZPXHUwMDE2XHUwMDA1oVx1MDAxMKU/YMZSjrZcdTAwMDeFs+xcdTAwMTmjm89RZXMjzbPTx3phqb3jjm5cdTAwMWHuUmx/7ZVvW3kkN63TXHUwMDBm+7BaOe1TPqZGRvJt51x1MDAxYlx1MDAxOcT0xbdcdKgkk1xcLNvM54tmh4xMyc2QL1x1MDAxYnNcdTAwMTng4oPa4tQ2QcPnidpcXHpIx7co0k7NkPhSutwqnO1ff3sxR5Xnb9vqsVx1MDAwMptf841fMjpcdTAwMTBcdTAwMGVYIPm1XHUwMDAwXCJWyqSFzyX1wVx1MDAxM2f4b/rpfFNcdTAwMWPZ0etcdTAwMTSb3Xaxw8I1jmFcdTAwMDFcdTAwMTToQVx1MDAwMp3wp4qDTWiDKFjXaKmtNNpcdTAwMTjnRlwiTD+yYay0TNLzOY53XG62+22E9HpjT5Hej15qQUxKdnqPq9jf31rphFt7V63Tr+tY0Dudk5yfUe+kXHIkWlx1MDAxND5cdTAwMWGJhFwiJtRcdTAwMTigYmOOpZu9L0pPyJyiXHUwMDE2/jdcYrXSXHUwMDA0vqr6T9XI5kSoUaRcbrXmgYCJztDEw5NXXHUwMDBm1dnKQ2nv+GDDiO21ZbEsc57Xr6xccjT4lq/Gx7JNLMuYNbWTRqHTXHUwMDEyIXr00lx1MDAxYlx1MDAwZVh5VailoEAhOc2eiJL+tJckoVx1MDAxNlx1MDAwMaIxvv6VRZfEeJtcdTAwMDcv1E6bP9a9NidSTS49O0kpQvSdS6ZcdTAwMTbrl+Xb1eeL3UKtdLrXKnYqXHUwMDE3oFd6+Vx1MDAxNmsrMDAsIWhcdTAwMDFcdTAwMWN7TLHeXHUwMDA1mlx1MDAwMuNcdTAwMWLRayn8WXzpOVx1MDAxOCFcdTAwMWL+KN6jq9m/RiPZkzFa+L7KSWJccoEvjTPCgj9YMlx1MDAxMlx1MDAwN1x1MDAxOIo1M4rMpM1evsU6tVx1MDAxMVx1MDAwZqbnJKBcdTAwMDJ2u+xcZoXFr/fMzKVAK2I9Tdqy0KK3nGNcdTAwMTa1MGxcdTAwMTKwq8py5Fu2pnvE77WoXHUwMDE5VtLn7Voh+dOETLA9XHUwM
DE0XHUwMDA1Qvqj0CV76MogjVx1MDAxZoEpfOt5XrO55Jfnvlx1MDAxYk9kyOH3QU3T79FPR6vc9+OHwE8sSVYsrOHYXCKNnEH75abY9pNcdTAwMTD4xExcdTAwMWO0i7BAZO3YrCxUXHUwMDFmoFS5849xiVx1MDAxYr7fp+j32Vx1MDAwZjJRqWqHTW/pWFpmiMS9XpeXS72jXHUwMDA3XHUwMDAxYjRg2Vx0XHUwMDA2/jaqdySw3lHExlx1MDAxZlvLJrLHnbV9aFnkpW/pw3zONntk0ofROFx1MDAxN1x1MDAxMPpcdTAwMTNRgVWhZCNwXFzvOMk8zCz8oXeieuf1krKI3vGn4Vnyp5ZcdTAwMWK2qdgvXHUwMDE4nmH8S/F4w4vIq3fn2J5nK2uhXHUwMDE1T6rc+ce4xGWld2R6XHUwMDEytZDSV2bNklDyepZcXC71jvXBXHUwMDE28DlfXHUwMDA2feenmN5he4cnXltcdTAwMDUgeK7St7ffXHUwMDFibOFcdTAwMTU2XG6QNOtccm+1JJz7q2SglD8kUFx1MDAxOM3eZ/Sgwp96x9DAr/17XHUwMDFlo5Smdl5P8IqqXHUwMDFk9o9Q8fSjc8RcXFx1MDAwM2pcXO2wday9PUSWfKd0UFx1MDAwYq11UqXOP8blLTOto1KDYb5cdTAwMDOjL9WbXum8nmWUS6XjNHpjx5/wwapHxVx1MDAwMrxcdTAwMTJcdTAwMDPWQ75cdTAwMTWibz/xSsX9u1x1MDAwM7yBYVnWTChcdTAwMDBWICTtPKpAkW80rkBJIWTEO1x1MDAxZVx1MDAxZVgjeLBzSmxbWKXzen7MqNJBpXw5riFcdTAwMDFCa1x1MDAxM2k9+kPpOO+HeZfbSGBnS4/PylwiKZ1UsfOPcYFLUzqffnzAl2K7fdTjhf+1XHUwMDFhLIDVyo9ccubhXX55qIaPy+lcdTAwMWLzn37Mp9dcdTAwMTjhQFx1MDAxYf/69Nf/XHUwMDAzVVjGXHUwMDE3In0= + + + + + GroupRoleUserGroupRoleGroupUserUserUserRoleRoleGlobal PermissionObject PermissionGlobal PermissionObject PermissionGlobal PermissionObject PermissionObject PermissionObject PermissionObject PermissionObject Permission \ No newline at end of file diff --git a/docs/docs/media/tutorial_1_branch_creation.png b/docs/docs/media/tutorial_1_branch_creation.png index 5759f3d2b1..52674d101e 100644 Binary files a/docs/docs/media/tutorial_1_branch_creation.png and b/docs/docs/media/tutorial_1_branch_creation.png differ diff --git a/docs/docs/media/tutorial_1_branch_details.png b/docs/docs/media/tutorial_1_branch_details.png index ae9bb9e417..a50ddb5052 100644 Binary files a/docs/docs/media/tutorial_1_branch_details.png and b/docs/docs/media/tutorial_1_branch_details.png differ diff --git a/docs/docs/media/tutorial_1_branch_diff.png b/docs/docs/media/tutorial_1_branch_diff.png index 3c77b29435..9ccd2702e6 100644 Binary files 
a/docs/docs/media/tutorial_1_branch_diff.png and b/docs/docs/media/tutorial_1_branch_diff.png differ diff --git a/docs/docs/media/tutorial_1_branch_list.png b/docs/docs/media/tutorial_1_branch_list.png index 04cb5d89cb..3e4759b90b 100644 Binary files a/docs/docs/media/tutorial_1_branch_list.png and b/docs/docs/media/tutorial_1_branch_list.png differ diff --git a/docs/docs/media/tutorial_1_organization_create.png b/docs/docs/media/tutorial_1_organization_create.png index 3eee13bf55..5c9575787e 100644 Binary files a/docs/docs/media/tutorial_1_organization_create.png and b/docs/docs/media/tutorial_1_organization_create.png differ diff --git a/docs/docs/media/tutorial_1_organization_details.png b/docs/docs/media/tutorial_1_organization_details.png index 43dcdd4542..c6a4af445b 100644 Binary files a/docs/docs/media/tutorial_1_organization_details.png and b/docs/docs/media/tutorial_1_organization_details.png differ diff --git a/docs/docs/media/tutorial_1_organization_edit.png b/docs/docs/media/tutorial_1_organization_edit.png index e1d8434c19..fc877d4651 100644 Binary files a/docs/docs/media/tutorial_1_organization_edit.png and b/docs/docs/media/tutorial_1_organization_edit.png differ diff --git a/docs/docs/media/tutorial_1_organizations.png b/docs/docs/media/tutorial_1_organizations.png index 7eab989d58..624d36e70d 100644 Binary files a/docs/docs/media/tutorial_1_organizations.png and b/docs/docs/media/tutorial_1_organizations.png differ diff --git a/docs/docs/media/tutorial_2_historical.png b/docs/docs/media/tutorial_2_historical.png index cd6cfd6e0d..e3d97e9830 100644 Binary files a/docs/docs/media/tutorial_2_historical.png and b/docs/docs/media/tutorial_2_historical.png differ diff --git a/docs/docs/media/tutorial_3_schema.png b/docs/docs/media/tutorial_3_schema.png index 2a808c338b..817874df49 100644 Binary files a/docs/docs/media/tutorial_3_schema.png and b/docs/docs/media/tutorial_3_schema.png differ diff --git a/docs/docs/media/tutorial_4_metadata.png 
b/docs/docs/media/tutorial_4_metadata.png index 637a5a9198..60a5aa9381 100644 Binary files a/docs/docs/media/tutorial_4_metadata.png and b/docs/docs/media/tutorial_4_metadata.png differ diff --git a/docs/docs/media/tutorial_4_metadata_edit.png b/docs/docs/media/tutorial_4_metadata_edit.png index f08829abf2..1ef2cf2b8d 100644 Binary files a/docs/docs/media/tutorial_4_metadata_edit.png and b/docs/docs/media/tutorial_4_metadata_edit.png differ diff --git a/docs/docs/media/tutorial_6_branch_creation.png b/docs/docs/media/tutorial_6_branch_creation.png index bd43dac53f..87203d518c 100644 Binary files a/docs/docs/media/tutorial_6_branch_creation.png and b/docs/docs/media/tutorial_6_branch_creation.png differ diff --git a/docs/docs/overview/interfaces.mdx b/docs/docs/overview/interfaces.mdx index 7061433214..9d975cde32 100644 --- a/docs/docs/overview/interfaces.mdx +++ b/docs/docs/overview/interfaces.mdx @@ -54,6 +54,6 @@ More information can be found in the [Python SDK](../python-sdk/) documentation One of the three pillars Infrahub is built on is the idea of having unified storage for data and files. The data is stored in the graph database and the files are stored in Git. -When integrating a Git repository with Infrahub, the Git agent will ensure that both systems stay in sync at any time. Changes to branches or files in a Git repository will be synced to Infrahub automatically. +When integrating a Git repository with Infrahub, the Task worker will ensure that both systems stay in sync at any time. Changes to branches or files in a Git repository will be synced to Infrahub automatically. 
More information can be found in the [external repositories guide](../guides/repository) diff --git a/docs/docs/overview/readme.mdx b/docs/docs/overview/readme.mdx index 25dae372cb..aa88ef332e 100644 --- a/docs/docs/overview/readme.mdx +++ b/docs/docs/overview/readme.mdx @@ -5,7 +5,7 @@ import ReferenceLink from "../../src/components/Card"; # Infrahub overview -![Infrahub architecture](../media/overview.excalidraw.svg) +![Infrahub architecture](./../media/overview.excalidraw.svg) Infrahub acts as a central hub to manage all of the information and code that powers your infrastructure. At its heart, Infrahub is built on 3 fundamental pillars: diff --git a/docs/docs/python-sdk/guides/client.mdx b/docs/docs/python-sdk/guides/client.mdx index 090c70d1e4..9830e71bf9 100644 --- a/docs/docs/python-sdk/guides/client.mdx +++ b/docs/docs/python-sdk/guides/client.mdx @@ -30,7 +30,11 @@ client = InfrahubClientSync(address="http://localhost:8000") ## Authentication -The SDK is using a token-based authentication method to authenticate with the API and GraphQL. The token can be provided using a `Config` object or you can define it as the `INFRAHUB_API_TOKEN` environment variable. +The SDK can use API Tokens or JWT Tokens to authenticate with the REST API and GraphQL. + +### API tokens + +The API token can be provided using a `Config` object or you can define it as the `INFRAHUB_API_TOKEN` environment variable. @@ -53,6 +57,31 @@ The SDK is using a token-based authentication method to authenticate with the AP +### JWT tokens + +The username and password of the user can be provided using a `Config` object or you can define them using the `INFRAHUB_USERNAME` and `INFRAHUB_PASSWORD` environment variables. The usage of JWT Tokens is completely transparent to the user, including the process of refreshing the JWT token. 
+ + + + + ```python + from infrahub_sdk import Config, InfrahubClient + client = await InfrahubClient(config=Config(username="admin", password="infrahub")) + client = await InfrahubClient() # token is read from the INFRAHUB_USERNAME and INFRAHUB_PASSWORD environment variable + ``` + + + + + ```python + from infrahub_sdk import Config, InfrahubClientSync + client = InfrahubClientSync(config=Config(username="admin", password="infrahub")) + client = InfrahubClientSync() # token is read from the INFRAHUB_USERNAME and INFRAHUB_PASSWORD environment variable + ``` + + + + ## Configuring the client object The client object can be configured by providing a `Config` object. Here we will show you how to enable the client to print out all of the GraphQL queries it will send to Infrahub. diff --git a/docs/docs/python-sdk/guides/store.mdx b/docs/docs/python-sdk/guides/store.mdx index 356e277f9b..f2d8546f0b 100644 --- a/docs/docs/python-sdk/guides/store.mdx +++ b/docs/docs/python-sdk/guides/store.mdx @@ -143,7 +143,7 @@ You can use a custom store, outside of the Infrahub SDK client. Storing or retri store = NodeStore() device = await client.get(kind="TestDevice", name__value="atl1-edge1") - store.set(key=device.name.value, node=store) + store.set(key=device.name.value, node=device) store.get(key=device.name.value) ``` @@ -155,7 +155,7 @@ You can use a custom store, outside of the Infrahub SDK client. 
Storing or retri store = NodeStoreSync() device = await client.get(kind="TestDevice", name__value="atl1-edge1") - store.set(key=device.name.value, node=store) + store.set(key=device.name.value, node=device) store.get(key=device.name.value) ``` diff --git a/docs/docs/reference/configuration.mdx b/docs/docs/reference/configuration.mdx index f3556c1ad9..c1ee35c0c5 100644 --- a/docs/docs/reference/configuration.mdx +++ b/docs/docs/reference/configuration.mdx @@ -30,8 +30,9 @@ Here are a few common methods of setting environmental variables: | AWS_ACCESS_KEY_ID | | | | | | AWS_SECRET_ACCESS_KEY | | | | | | DB_TYPE | | | | | -| INFRAHUB_ADDRESS | "HTTP endpoint of the API Server, used by the git agent for internal communication" | http://server:8000 | | | +| INFRAHUB_ADDRESS | "HTTP endpoint of the API Server, used by the Task worker for internal communication" | http://server:8000 | | | | INFRAHUB_ALLOW_ANONYMOUS_ACCESS | Indicates if the system allows anonymous read access | TRUE | | | +| INFRAHUB_ANONYMOUS_ACCESS_ROLE | Indicates if the system allows anonymous read access | Anonymous User | | | | INFRAHUB_ANALYTICS_ADDRESS | | | | | | INFRAHUB_ANALYTICS_API_KEY | | | | | | INFRAHUB_ANALYTICS_ENABLE | | | | | diff --git a/docs/docs/reference/git-agent.mdx b/docs/docs/reference/git-agent.mdx index 5906143845..57cd4041d1 100644 --- a/docs/docs/reference/git-agent.mdx +++ b/docs/docs/reference/git-agent.mdx @@ -1,12 +1,12 @@ --- -title: Git Agent +title: Task worker --- -# Git agent +# Task worker :::warning Under Construction This page is still under construction and is not available yet.
-Please reach out in Slack if you have some questions about the **Git agent** +Please reach out in Slack if you have some questions about the **Task worker** ::: diff --git a/docs/docs/reference/menu.mdx b/docs/docs/reference/menu.mdx new file mode 100644 index 0000000000..c3523e6efa --- /dev/null +++ b/docs/docs/reference/menu.mdx @@ -0,0 +1,80 @@ +--- +title: Menu definition file +--- + +# Menu definition file + +A menu definition file allows you to control the layout and structure of the menu on the left side of the Infrahub web interface. More information can be found in the [Controlling the menu guide](/guides/menu). + +The menu definition file is a YAML file that follows a particular structure or schema. + +At the top of the file we define a bit of boilerplate statements, to define the version of the schema of the file and the kind of content that it contains. We also define a spec mapping with a data key. + +```yaml +--- +apiversion: infrahub.app/v1 +kind: Menu +spec: + data: +``` + +The value of the data key in the spec mapping, is a sequence (or list) of menu items. 
+ +A menu item is a mapping in which you can define the following key/value pairs: + +| Key | Type | Description | Mandatory | +|--------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------| +| name | string | the name of the menu item | :white_check_mark: | +| namespace | string | the namespace of the menu item | :white_check_mark: | +| label | string | the name of the menu item in UI | :x: | +| kind | string | selecting this menu item will take you to the list view of this schema node (this automatically sets the value for the path key to the correct value) | :x: | +| path | string | selecting this menu will take you to this path (URL) | :x: | +| icon | string | the icon of the menu item | :x: | +| order_weight | integer | controls the ordering of menu items (lower values are ordered first) | :x: | +| parent | string | a parent menu item (concatenated namespace + name) | :x: | +| children | sequence | list of nested menu items | :x: | + +## Example + +```yaml +--- +apiversion: infrahub.app/v1 +kind: Menu +spec: + data: + - namespace: Location + name: Mainmenu + label: Location + icon: "mingcute:location-line" + children: + data: + - namespace: Location + name: Country + label: Countries + kind: LocationCountry + icon: "gis:search-country" + + - namespace: Location + name: Site + label: Sites + kind: LocationSite + icon: "ri:building-line" + + - namespace: Infrastructure + name: Mainmenu + label: Infrastructure + icon: "mdi:domain" + children: + data: + - namespace: Network + name: Device + label: Devices + kind: NetworkDevice + icon: "mdi:router" + + - namespace: Network + name: Interface + label: Interface + kind: NetworkInterface + icon: "mdi:ethernet" +``` diff --git a/docs/docs/reference/message-bus-events.mdx b/docs/docs/reference/message-bus-events.mdx index db8f8b779a..41cf8bd9ca 100644 --- 
a/docs/docs/reference/message-bus-events.mdx +++ b/docs/docs/reference/message-bus-events.mdx @@ -205,7 +205,6 @@ For more detailed explanations on how to use these events within Infrahub, see t | **meta** | Meta properties for the message | N/A | None | | **source_branch** | The source branch | string | None | | **target_branch** | The target branch | string | None | -| **ipam_node_details** | Details for changed IP nodes | array | None | #### Event event.branch.rebased @@ -220,7 +219,6 @@ For more detailed explanations on how to use these events within Infrahub, see t |-----|-------------|------|---------------| | **meta** | Meta properties for the message | N/A | None | | **branch** | The branch that was rebased | string | None | -| **ipam_node_details** | Details for changed IP nodes | array | None | @@ -308,28 +306,6 @@ For more detailed explanations on how to use these events within Infrahub, see t - -### Git Branch - - - -#### Event git.branch.create - - -**Description**: Create a branch in a Git repository. 
- -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch** | Name of the branch to create | string | None | -| **branch_id** | The unique ID of the branch | string | None | -| **repository_id** | The unique ID of the Repository | string | None | -| **repository_name** | The name of the Repository | string | None | - - ### Git Diff @@ -586,40 +562,6 @@ For more detailed explanations on how to use these events within Infrahub, see t - -### Request Artifact - - - -#### Event request.artifact.generate - - -**Description**: Runs to generate an artifact - -**Priority**: 2 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **artifact_name** | Name of the artifact | string | None | -| **artifact_definition** | The the ID of the artifact definition | string | None | -| **commit** | The commit to target | string | None | -| **content_type** | Content type of the artifact | string | None | -| **transform_type** | The type of transform associated with this artifact | string | None | -| **transform_location** | The transforms location within the repository | string | None | -| **repository_id** | The unique ID of the Repository | string | None | -| **repository_name** | The name of the Repository | string | None | -| **repository_kind** | The kind of the Repository | string | None | -| **branch_name** | The branch where the check is run | string | None | -| **target_id** | The ID of the target object for this artifact | string | None | -| **target_name** | Name of the artifact target | string | None | -| **artifact_id** | The id of the artifact if it previously existed | N/A | None | -| **query** | The name of the query to use when collecting data | string | None | -| **timeout** | Timeout for requests used to generate this artifact | integer 
| None | -| **variables** | Input variables when generating the artifact | object | None | - - ### Request Artifact Definition @@ -643,91 +585,6 @@ For more detailed explanations on how to use these events within Infrahub, see t | **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | | **destination_branch** | The target branch | string | None | - -#### Event request.artifact_definition.generate - - -**Description**: Sent to trigger the generation of artifacts for a given branch. - -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **artifact_definition** | The unique ID of the Artifact Definition | string | None | -| **branch** | The branch to target | string | None | -| **limit** | List of targets to limit the scope of the generation, if populated only the included artifacts will be regenerated | array | None | - - - -### Request Diff - - - -#### Event request.diff.update - - -**Description**: Request diff to be updated. - - If the message only include a branch_name, it is assumed to be for updating the diff that tracks - the lifetime changes of a branch - -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch_name** | The branch associated with the diff | string | None | -| **name** | N/A | N/A | None | -| **from_time** | N/A | N/A | None | -| **to_time** | N/A | N/A | None | - - -#### Event request.diff.refresh - - -**Description**: Request diff be recalculated from scratch. 
- -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch_name** | The branch associated with the diff | string | None | -| **diff_id** | The id for this diff | string | None | - - - -### Request Generator - - - -#### Event request.generator.run - - -**Description**: Runs a generator. - -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **generator_definition** | The Generator definition | N/A | None | -| **generator_instance** | The id of the generator instance if it previously existed | N/A | None | -| **commit** | The commit to target | string | None | -| **repository_id** | The unique ID of the Repository | string | None | -| **repository_name** | The name of the Repository | string | None | -| **repository_kind** | The kind of the Repository | string | None | -| **branch_name** | The branch where the check is run | string | None | -| **target_id** | The ID of the target object for this generator | string | None | -| **target_name** | Name of the generator target | string | None | -| **query** | The name of the query to use when collecting data | string | None | -| **variables** | Input variables when running the generator | object | None | - ### Request Generator Definition @@ -1000,64 +857,6 @@ For more detailed explanations on how to use these events within Infrahub, see t | **message** | The message to send | string | None | - -### Send Webhook - - - -#### Event send.webhook.event - - -**Description**: Sent a webhook to an external source. 
- -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **webhook_id** | The unique ID of the webhook | string | None | -| **event_type** | The event type | string | None | -| **event_data** | The data tied to the event | object | None | - - - -### Send Telemetry - - - -#### Event send.telemetry.push - - -**Description**: Push usage telemetry. - -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | - - - - -### Trigger Artifact Definition - - - -#### Event trigger.artifact_definition.generate - - -**Description**: Sent after a branch has been merged to start the regeneration of artifacts - -**Priority**: 3 - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch** | The impacted branch | string | None | - ### Trigger Generator Definition @@ -1321,7 +1120,6 @@ For more detailed explanations on how to use these events within Infrahub, see t | **meta** | Meta properties for the message | N/A | None | | **source_branch** | The source branch | string | None | | **target_branch** | The target branch | string | None | -| **ipam_node_details** | Details for changed IP nodes | array | None | #### Event event.branch.rebased @@ -1337,7 +1135,6 @@ For more detailed explanations on how to use these events within Infrahub, see t |-----|-------------|------|---------------| | **meta** | Meta properties for the message | N/A | None | | **branch** | The branch that was rebased | string | None | -| **ipam_node_details** | Details for changed IP nodes | array | None | @@ -1429,29 +1226,6 @@ For more detailed explanations on how to use these events within Infrahub, see t - -### Git Branch - - - -#### Event git.branch.create - - 
-**Description**: Create a branch in a Git repository. - -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch** | Name of the branch to create | string | None | -| **branch_id** | The unique ID of the branch | string | None | -| **repository_id** | The unique ID of the Repository | string | None | -| **repository_name** | The name of the Repository | string | None | - - ### Git Diff @@ -1721,41 +1495,6 @@ For more detailed explanations on how to use these events within Infrahub, see t - -### Request Artifact - - - -#### Event request.artifact.generate - - -**Description**: Runs to generate an artifact - -**Priority**: 2 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **artifact_name** | Name of the artifact | string | None | -| **artifact_definition** | The the ID of the artifact definition | string | None | -| **commit** | The commit to target | string | None | -| **content_type** | Content type of the artifact | string | None | -| **transform_type** | The type of transform associated with this artifact | string | None | -| **transform_location** | The transforms location within the repository | string | None | -| **repository_id** | The unique ID of the Repository | string | None | -| **repository_name** | The name of the Repository | string | None | -| **repository_kind** | The kind of the Repository | string | None | -| **branch_name** | The branch where the check is run | string | None | -| **target_id** | The ID of the target object for this artifact | string | None | -| **target_name** | Name of the artifact target | string | None | -| **artifact_id** | The id of the artifact if it previously existed | N/A | None | -| **query** | The name of the query to use when collecting data | string | None | -| **timeout** | 
Timeout for requests used to generate this artifact | integer | None | -| **variables** | Input variables when generating the artifact | object | None | - - ### Request Artifact Definition @@ -1780,95 +1519,6 @@ For more detailed explanations on how to use these events within Infrahub, see t | **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | | **destination_branch** | The target branch | string | None | - -#### Event request.artifact_definition.generate - - -**Description**: Sent to trigger the generation of artifacts for a given branch. - -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **artifact_definition** | The unique ID of the Artifact Definition | string | None | -| **branch** | The branch to target | string | None | -| **limit** | List of targets to limit the scope of the generation, if populated only the included artifacts will be regenerated | array | None | - - - -### Request Diff - - - -#### Event request.diff.update - - -**Description**: Request diff to be updated. - - If the message only include a branch_name, it is assumed to be for updating the diff that tracks - the lifetime changes of a branch - -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch_name** | The branch associated with the diff | string | None | -| **name** | N/A | N/A | None | -| **from_time** | N/A | N/A | None | -| **to_time** | N/A | N/A | None | - - -#### Event request.diff.refresh - - -**Description**: Request diff be recalculated from scratch. 
- -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch_name** | The branch associated with the diff | string | None | -| **diff_id** | The id for this diff | string | None | - - - -### Request Generator - - - -#### Event request.generator.run - - -**Description**: Runs a generator. - -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **generator_definition** | The Generator definition | N/A | None | -| **generator_instance** | The id of the generator instance if it previously existed | N/A | None | -| **commit** | The commit to target | string | None | -| **repository_id** | The unique ID of the Repository | string | None | -| **repository_name** | The name of the Repository | string | None | -| **repository_kind** | The kind of the Repository | string | None | -| **branch_name** | The branch where the check is run | string | None | -| **target_id** | The ID of the target object for this generator | string | None | -| **target_name** | Name of the generator target | string | None | -| **query** | The name of the query to use when collecting data | string | None | -| **variables** | Input variables when running the generator | object | None | - ### Request Generator Definition @@ -2155,67 +1805,6 @@ For more detailed explanations on how to use these events within Infrahub, see t | **message** | The message to send | string | None | - -### Send Webhook - - - -#### Event send.webhook.event - - -**Description**: Sent a webhook to an external source. 
- -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **webhook_id** | The unique ID of the webhook | string | None | -| **event_type** | The event type | string | None | -| **event_data** | The data tied to the event | object | None | - - - -### Send Telemetry - - - -#### Event send.telemetry.push - - -**Description**: Push usage telemetry. - -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | - - - - -### Trigger Artifact Definition - - - -#### Event trigger.artifact_definition.generate - - -**Description**: Sent after a branch has been merged to start the regeneration of artifacts - -**Priority**: 3 - - - -| Key | Description | Type | Default Value | -|-----|-------------|------|---------------| -| **meta** | Meta properties for the message | N/A | None | -| **branch** | The impacted branch | string | None | - ### Trigger Generator Definition diff --git a/docs/docs/reference/permissions.mdx b/docs/docs/reference/permissions.mdx new file mode 100644 index 0000000000..ccae2fe5a2 --- /dev/null +++ b/docs/docs/reference/permissions.mdx @@ -0,0 +1,62 @@ +--- +title: Permissions +--- + +# Permissions + +This page provides detailed documentation for all available global and object permissions within Infrahub. + +:::info + +For more detailed explanations on how to use these permissions within Infrahub, see the [roles and permissions](/topics/permissions-roles) topic. 
+ +::: + +## Global permissions + +Below are the eight global permissions possible in Infrahub: + +| Identifier | Action | Decision | +|-------------------------------------------|-------------------------|----------| +| `global:edit_default_branch:allow_all` | `edit_default_branch` | `Allow` | +| `global:manage_accounts:allow_all` | `manage_accounts` | `Allow` | +| `global:manage_permissions:allow_all` | `manage_permissions` | `Allow` | +| `global:manage_repositories:allow_all` | `manage_repositories` | `Allow` | +| `global:merge_branch:allow_all` | `merge_branch` | `Allow` | +| `global:merge_proposed_change:allow_all` | `merge_proposed_change` | `Allow` | +| `global:manage_schema:allow_all` | `manage_schema` | `Allow` | +| `global:super_admin:allow_all` | `super_admin` | `Allow` | + +### Attributes + +- **Identifier**: A unique string that identifies the permission, computed by the backend based on the **Action** and **Decision**. +- **Action**: The action that the permission permits, such as `edit_default_branch` or `manage_accounts`. +- **Decision**: Indicates if the action is permitted or prohibited: + - **Allow**: Grants permission for the action. + - **Deny**: Denies permission for the action. +- **Roles**: These are the roles that make use of this permission. + +## Object permissions + +Object permissions can be applied to different types of objects and across different branches. + +| Identifier | Object Type | Action | Decision | Description | +|-----------------------------------------------|---------------|---------|-------------|---------------------------------------------------------------------------------------------| +| `object:*:*:create:allow_other` | `*` (all types)| `any` | `allow_other`| Allows creating any object, but only on non-default branches. | +| `object:*:*:view:allow_all` | `*` (all types)| `view` | `allow_all` | Allows viewing any object, anywhere, across both default and non-default branches. 
| +| `object:Builtin:Tag:update:deny` | `BuiltinTag` | `update`| `deny` | Denies the ability to update any object of type BuiltinTag, across all branches. | +| `object:*:Generic:view:allow_all` | `*Generic` | `view` | `allow_all` | Allows viewing all objects that contain 'Generic' in their type (example: LocationGeneric, DeviceGeneric) in all namespaces, across all branches. | + +### Attributes + + +- **Identifier**: A unique string that identifies the permission, computed by the backend based on the **Action**, **Object Type**, **Branch Type**, and **Decision**. +- **Object Type**: The type of object the permission applies to (such as, `tag`, `device`). Wildcards (`*`) can be used to apply the permission to all object types. +- **Action**: The specific action allowed on the object, such as `create`, `update`, `delete`, or `view`. +- **Decision**: Controls whether the action is allowed or denied, and under which branch type it applies: + - **allow_default**: Allows the action on the default branch. + - **allow_other**: Allows the action on branches other than the default one. + - **allow_all**: Allows the action on both the default and non-default branches. + - **deny**: Denies the action regardless of branch. +- **Roles**: The roles that use this permission. + \ No newline at end of file diff --git a/docs/docs/release-notes/infrahub/release-0_16_4.mdx b/docs/docs/release-notes/infrahub/release-0_16_4.mdx new file mode 100644 index 0000000000..c1c69f1bf5 --- /dev/null +++ b/docs/docs/release-notes/infrahub/release-0_16_4.mdx @@ -0,0 +1,72 @@ +--- +title: Release 0.16.4 +--- + + + + + + + + + + + + + + + + + + + +
Release Number0.16.4
Release DateOctober 17th, 2024
Release CodenameBeta #5, Patch #4
Tag[infrahub-v0.16.4](https://github.com/opsmill/infrahub/releases/tag/infrahub-v0.16.4)
+ +# Release 0.16.4 + +We are thrilled to announce the latest release of Infrahub, version *0.16.4*! + +This release focuses largely on bug fixes and is driven by our Beta Test users, +and as always we greatly appreciate their feedback and time! + +## Main changes + +The complete list of changes can always be found in the `CHANGELOG.md` file in the Infrahub Git repository. + +### Fixed + +- Fixed an issue on the UI where a new relationship was being added to the main branch instead of the current branch. ([#4598](https://github.com/opsmill/infrahub/issues/4598)) + +## Migration guide + +To migrate your instance of Infrahub to the latest version, please run the following commands and restart all instances of Infrahub. + + +```shell +infrahub db migrate +infrahub db update-core-schema +``` + + +> if you are running in docker these commands need to run from the container where Infrahub is installed + +### Migration of the demo instance + +If you are using the demo environment, you can migrate to the latest version with the following commands + +```shell +invoke demo.stop +invoke demo.build +invoke demo.migrate +invoke demo.start +``` + +If you don't want to keep your data, you can start a clean instance with the following command + +```shell +invoke demo.destroy demo.build demo.start demo.load-infra-schema demo.load-infra-data +``` + +> All data will be lost, please make sure to backup everything you need before running this command. + +The repository https://github.com/opsmill/infrahub-demo-edge has also been updated, it's recommended to pull the latest changes into your fork. diff --git a/docs/docs/release-notes/infrahub/release-1_0.mdx b/docs/docs/release-notes/infrahub/release-1_0.mdx new file mode 100644 index 0000000000..897d81017f --- /dev/null +++ b/docs/docs/release-notes/infrahub/release-1_0.mdx @@ -0,0 +1,252 @@ +--- +title: Release 1.0.0 +--- + + + + + + + + + + + + + + + + + + + +
Release Number1.0.0
Release DateOctober 30th, 2024
Release CodenameStockholm
Tag[infrahub-v1.0.0](https://github.com/opsmill/infrahub/releases/tag/infrahub-v1.0.0)
+ +# Release 1.0 + +We are thrilled to announce the general availability release of Infrahub, version *1.0*! + +We greatly appreciate all the time and efforts of our dedicated community of developers and beta testers! + +## Main changes + +Infrahub 1.0 is focused on bringing Infrahub to even more organizations and equipping them with a +production-ready solution to their infrastructure automation challenges. + +There are four key features and changes in Infrahub version 1.0 as outlined below: + +- SSO login capabilities (OIDC/OUATH2) +- A new permission framework +- Performance enhancements to version control actions (diff/merge/rebase) +- An exciting UI and navigation redesign + +In addition to these, there have been many bug-fixes and quality-of-life enhancements shown in the detailed changelog. + +### Single sign-on and user permissions + +Early in the development of Infrahub, we consciously decided to focus on the groundbreaking and critical features that make +Infrahub a uniquely world-class Source of Truth. +As a result, we set aside some standard enterprise features, such as Single Sign-On, until later, +knowing that they would be straightforward to implement in the system. + +As we enter a new era with Infrahub 1.0, we have focused on rounding out the enterprise-grade features +that our customers require. +For example, we added Single Sign-On (SSO) integrations for OIDC/Oauth 2.0 and implemented a robust and granular permissions system. +These two features combine to give organizations a level of control that allows them to trust their mission-critical +data in Infrahub and bring even closer integration with existing enterprise systems and workflows. + +#### SSO + +The new OIDC/OAuth2 capabilities, tested and functioning in the field with many Identity Providers +such as Keycloak, Authentik, and Google Auth, allow organizations to manage their users and groups centrally +instead of in the Infrahub UI. 
+Subsequent releases of Infrahub will soon include validated support for additional Identity Providers +and other authentication methods (such as LDAP). + +This functionality goes hand in hand with the feature we will discuss next: our new User Permissions structure. + +##### Documentation + +- https://docs.infrahub.app/guides/sso + +#### User permissions + +As users store more data in Infrahub and more teams interact with that data, it becomes crucial to protect it +from accidental changes. +By implementing a granular role-based permission system, Infrahub allows organizations to prevent unauthorized changes +to the data behind critical infrastructure automation efforts. + +In the permission structure introduced in Infrahub 1.0, Users are added to Groups, Groups are given Roles, +and Permissions are finally allocated to those Roles. +Permissions come in two fundamental varieties: Global Permissions and Object Permissions. + +The below diagram lays out the relationship between each of these entities. +A User can belong to one or more Groups, a Group can have multiple Roles assigned to it, and each Role can be granted one +or more Global or Object Permissions. + +!["Example relationship of User to Group to Role to Permission in Infrahub 1.0."](../../media/release_notes/infrahub_1_0/permissions_1_0.excalidraw.svg) + +##### Global vs. object permissions + +It is also worth mentioning the difference between Global Permissions and Object Permissions. 
+ +- Global Permissions are specific permission sets that can give users system-wide rights to perform particular actions, for example: + - Editing the default Branch + - Editing Permissions + - Allowing the merging of proposed changes + - Account management +- Object Permissions are tied to individual objects within Infrahub and control what actions users can take on those objects; examples could include: + - Allow read-only access to all objects + - Deny the ability to update Tags + - Allow editing on any object type that starts with `DataCenter` + +For either style, Permissions are structured to be robust and granular by allowing complete control over +the Action, Decision, and Role of a given Permission set (plus the Object Type for Object Permissions). + +##### Documentation + +This is a significant topic on a new feature that enables complex workflows to meet organizational needs. Because of this, we strongly recommend diving into the documentation links below. + +- https://docs.infrahub.app/guides/accounts-permissions +- https://docs.infrahub.app/topics/permissions-roles +- https://docs.infrahub.app/reference/permissions + +### Performance improvements + +Our design philosophy at OpsMill has always been driven by long experience as network practitioners or admins +in Unix-like systems: “Make it work, make it right, make it fast.” + +In Infrahub 1.0, we focused on the last part, “Make it fast.” + +As our early beta testers began to utilize Infrahub in increasingly large infrastructures (greater than 50,000 nodes), +we expected (and found) opportunities for performance improvements. +This was especially true when performing Version Control actions on large data sets. +As a result of the intensive testing, we made dramatic improvements to branch change management. +Infrahub 1.0 has improved how Infrahub computes a difference between two branches, re-bases a branch, +and handles the merge. 
+For example, in the “diff” generation for a proposed change, we have seen a 30% increase in performance over earlier versions. +As a result, the proposed change functionality can now reliably handle much larger data sets for comparison. + +These performance improvements are only the beginning of our optimization efforts, but they have already improved +the experience of Infrahub users of all infrastructure sizes. + +### UI redesign + +While the UI in Infrahub before 1.0 served its purpose well, there was room for improvement. +A corollary fourth phrase to our design philosophy might be, “Now, make it pretty!” + +We didn’t just change around some colors or styles; we worked closely with our beta testers and a dedicated +User Experience professional to ensure that using Infrahub 1.0 would be an experience that our users enjoyed. + +!["Example screenshot of new UI in Infrahub 1.0."](../../media/release_notes/infrahub_1_0/1_0_ui.png) + +We also provided capabilities for complete customization of the navigation menu. +We continue to emphasize that Infrahub is a powerful, fully customizable system that meets your +organization where it needs to be. 
+ +#### Documentation + +- https://docs.infrahub.app/guides/menu +- https://docs.infrahub.app/reference/menu + +## Other + +### Removed + +- Remove previously deprecated GET API endpoint "/api/schema/" ([#3884](https://github.com/opsmill/infrahub/issues/3884)) + +### Deprecated + +- Marked CoreAccount.role as deprecated + Due to the new permissions framework the account roles "admin" / "read-only" / "read-write" are deprecated and will be removed in Infrahub 1.1 + +### Added + +- Reworked branch selector: + - Redesigned the UI + - Added filter for branch + - Improved accessibility & keyboard navigation + - Improved UX on new branch form + - Added quick link to view all branches +- Add support to sign in with OAuth2 and Open ID Connect (OIDC) ([#1568](https://github.com/opsmill/infrahub/issues/1568)) +- Add internal HTTP adapter to allow for generic access from Infrahub ([#3302](https://github.com/opsmill/infrahub/issues/3302)) +- Add support to search a node by human friendly ID within a GraphQL query ([#3908](https://github.com/opsmill/infrahub/issues/3908)) +- Added link to our Discord server in the account menu +- Added permissions framework for global and object kind level permissions + + In this first iteration the object permissions are applied to nodes as a whole, in upcoming versions it will be possible to define attribute level permissions as well. 
+- New permissions system in UI: + - Implemented CRUD views for managing accounts, groups, roles, and permissions + - Updated all components to support new permission system + - Added dynamic message display according to user access levels + +### Fixed + +- The `infrahub-git` agent service has been renamed to `task-worker` in docker compose and the command to start it has been updated as well ([#1075](https://github.com/opsmill/infrahub/issues/1075)) +- Add ability to import repositories with default branch other than 'main' ([#3435](https://github.com/opsmill/infrahub/issues/3435)) +- Disable approve/merge/close buttons for merged Proposed Changes ([#3495](https://github.com/opsmill/infrahub/issues/3495)) +- Fixed regex validation for List type attributes ([#3929](https://github.com/opsmill/infrahub/issues/3929)) +- Allow users to run artifacts and generators on nodes without name attribute ([#4062](https://github.com/opsmill/infrahub/issues/4062)) +- In the schema, properly delete inherited attribute and relationship on Node when the original attribute or relationship are being deleted on the Generic ([#4301](https://github.com/opsmill/infrahub/issues/4301)) +- "Retry All" button for checks is bigger ([#4315](https://github.com/opsmill/infrahub/issues/4315)) +- Add a size restriction on common attribute kinds. 
Only TextArea and JSON support large values ([#4432](https://github.com/opsmill/infrahub/issues/4432)) +- The HFID of a related node is properly returned via GraphQL in all scenarios ([#4482](https://github.com/opsmill/infrahub/issues/4482)) +- Add full validation to BranchMerge and BranchRebase mutations ([#4595](https://github.com/opsmill/infrahub/issues/4595)) +- Report user-friendly error for invalid uniqueness_constraints when loading schemas ([#4677](https://github.com/opsmill/infrahub/issues/4677)) +- Fixed pagination query for nodes with order_by clause using non unique attributes ([#4700](https://github.com/opsmill/infrahub/issues/4700)) +- Fixed schema migration when an attribute previously present on a node is added back ([#4727](https://github.com/opsmill/infrahub/issues/4727)) +- Add order_weight property to multiple attributes and relationships in the demo schema to improve how some models are displayed in the list views +- Changed the Python SDK connection timeout to 60s +- Fix metric missing the query name in Prometheus data +- Fixes an issue where docker compose would output ANSI control characters that don't support it +- Prevent temporary directories generated by Docusaurus to be imported by Docker + +## Migration guide + +The process to migrate your instance of Infrahub to the latest version may vary depending on your deployment of Infrahub. +However, at a high-level, it will involve getting the latest version of the Infrahub code, and then performing any needed Database Migrations and Schema updates. + +Please ensure you have a **backup of your Infrahub environment** prior to attempting any migration or upgrade activities. + +### Migration of an Infrahub instance + +**First**, update the Infrahub version running in your environment. + +Below are some example ways to get the latest version of Infrahub in your environment. 
+ +- For deployments via Docker Compose, update your container version by updating the `IMAGE_VER` environment variable and relaunch: + - `export IMAGE_VER="1.0.0"; docker compose pull && docker compose up -d` +- For deployments via Kubernetes, utilize the latest version of the Helm chart supplied with this release + +**Second**, once you have gotten the desired version of Infrahub in your environment, please run the following commands. + +> Note: If you are running Infrahub in Docker/K8s, these commands need to run from a container where Infrahub is installed. + +```shell +infrahub db migrate +infrahub db update-core-schema +``` + +**Finally**, restart all instances of Infrahub. + +### Migration of a dev or demo instance + +If you are using the `dev` or `demo` environments, we have provided `invoke` commands to aid in the migration to the latest version. +The below examples provide the `demo` version of the commands, however similar commands can be used for `dev` as well. + +```shell +invoke demo.stop +invoke demo.build +invoke demo.migrate +invoke demo.start +``` + +If you don't want to keep your data, you can start a clean instance with the following command. + +> **Warning: All data will be lost, please make sure to backup everything you need before running this command.** + +```shell +invoke demo.destroy demo.build demo.start demo.load-infra-schema demo.load-infra-data +``` + +The repository https://github.com/opsmill/infrahub-demo-edge has also been updated, it's recommended to pull the latest changes into your fork. 
\ No newline at end of file diff --git a/docs/docs/release-notes/infrahub/release-1_0-DRAFT.mdx b/docs/docs/release-notes/infrahub/release-1_0_1-DRAFT.mdx similarity index 96% rename from docs/docs/release-notes/infrahub/release-1_0-DRAFT.mdx rename to docs/docs/release-notes/infrahub/release-1_0_1-DRAFT.mdx index 185e7db431..28c209c497 100644 --- a/docs/docs/release-notes/infrahub/release-1_0-DRAFT.mdx +++ b/docs/docs/release-notes/infrahub/release-1_0_1-DRAFT.mdx @@ -1,5 +1,5 @@ --- -title: Release 0.16 - DEVELOPMENT +title: Release 1.0.1 - DEVELOPMENT --- @@ -22,7 +22,7 @@ title: Release 0.16 - DEVELOPMENT
-# Release 1.0 +# Release 1.0.1 ## Main changes diff --git a/docs/docs/topics/architecture.mdx b/docs/docs/topics/architecture.mdx index 44ed346db0..8bcd36668c 100644 --- a/docs/docs/topics/architecture.mdx +++ b/docs/docs/topics/architecture.mdx @@ -14,7 +14,8 @@ The main components are: - A **Frontend** written in React and rendered in the user's browser. - An **API server** written in Python with FastAPI. -- A **Git agent** written in Python to manage the interaction with external Git repositories. +- A **Task manager** based on `Prefect` to orchestrate workflow tasks. +- A **Task worker** written in Python to execute specific tasks such as managing the interaction with external Git repositories. - A **Graph database** based on `neo4j`. - A **Message bus** based on `RabbitMQ`. - A **Cache** based on `redis`. @@ -33,13 +34,23 @@ Multiple instance of the API Server can run at the same time to process more req ::: -### Git agent +### Task manager + +The Task manager is based on Prefect, and is responsible for orchestration of tasks to be delegated to one or more Task workers. + +### Task worker Language: Python -The Git agent is responsible for managing all the content related to the Git repositories. It organizes the file systems in order to quickly access any relevant commit. The Git Agent periodically pulls the Git server for updates and listens to the RPC channel on the event bus for tasks to execute. +The Task worker is responsible for managing all the content related to the Git repositories. It organizes the file systems in order to quickly access any relevant commit. The Task worker periodically pulls the Git server for updates and listens to the RPC channel on the event bus for tasks to execute. + +Currently there are three types of tasks: + +- *Internal* tasks +- *User* tasks +- *Git* tasks -Some of the tasks that can be executed on the Git agent includes: +Some of the tasks that can be executed on the Task worker include: - Rendering a Jinja template. 
- Rendering a transform function. @@ -48,7 +59,7 @@ Some of the tasks that can be executed on the Git agent includes: :::note -Multiple instance of the Git agent can run at the same time to process more requests. +Multiple instance of the Task worker can run at the same time to process more requests. ::: diff --git a/docs/docs/topics/artifact.mdx b/docs/docs/topics/artifact.mdx index 08afd41a3e..88b1982b8f 100644 --- a/docs/docs/topics/artifact.mdx +++ b/docs/docs/topics/artifact.mdx @@ -34,7 +34,7 @@ An **artifact definition** centralizes all the information required to generate - Format of the output - Information to extract from each target that must be passed to the transformation. -From an **artifact definition** artifact nodes are created, for each target which is part of the group. The result of the transformation is stored in the [object storage](./object-storage.mdx). The generation of the artifacts is performed by the Git agent(s). +From an **artifact definition** artifact nodes are created, for each target which is part of the group. The result of the transformation is stored in the [object storage](./object-storage.mdx). The generation of the artifacts is performed by the Task worker(s). ![](../media/topics/artifact/architecture.excalidraw.svg) diff --git a/docs/docs/topics/auth.mdx b/docs/docs/topics/auth.mdx index 3793295007..63cbda2a98 100644 --- a/docs/docs/topics/auth.mdx +++ b/docs/docs/topics/auth.mdx @@ -4,39 +4,110 @@ title: User management and authentication # User management and authentication -Infrahub now supports standard user management and authentication systems. - -A user account can have 3 levels of permissions - -- `admin` -- `read-write` -- `read-only` - -By default, Infrahub will allow anonymous access in read-only. It's possible to disable this via the configuration `main.allow_anonymous_access` or via the environment variable `INFRAHUB_ALLOW_ANONYMOUS_ACCESS`. +By default, Infrahub will allow anonymous access in read-only mode. 
It's possible to disable this via the configuration `main.allow_anonymous_access` or via the environment variable `INFRAHUB_ALLOW_ANONYMOUS_ACCESS`. ## Authentication mechanisms -Infrahub supports two authentication methods +Infrahub supports two authentication methods: -- JWT token: Short life tokens generated on demand from the API. -- API Token: Long life tokens generated ahead of time. +- **JWT Token**: Short-lived tokens generated on demand from the API. +- **API Token**: Long-lived tokens generated ahead of time. | | JWT | TOKEN | | ------------------ | ---- | ----- | | API / GraphQL | Yes | Yes | | Frontend | Yes | No | -| Python SDK | Soon | Yes | -| infrahubctl | Soon | Yes | +| Python SDK | Yes | Yes | +| infrahubctl | Yes | Yes | | GraphQL Playground | No | Yes | -More information on managing API token can be found in the [managing API tokens guide](/guides/managing-api-tokens). +More information on managing API tokens can be found in the [Managing API Tokens Guide](/guides/managing-api-tokens). :::info While using the API, the authentication token must be provided in the header: ```yaml -X-INFRAHUB-KEY: 06438eb2-8019-4776-878c-0941b1f1d1ec -``` +X-INFRAHUB-KEY: 06438eb2-8019-4776-878c-0941b1f1d1ec``` ::: + +## Users permissions management + +Users are allocated permissions through groups and roles. + +- **Users** are members of **Groups**. +- **Groups** are related with **Roles**. +- **Roles** are allocated **Permissions** (global or object-specific). + +Using roles and groups to manage permissions, **Infrahub offers a scalable way** to control access for numerous users simultaneously. For more detailed information, visit the [role and permissions page](/topics/permissions-roles). + +## Default setup + +Infrahub comes with a default configuration that contains pre-configured users, groups, and roles to simplify access management from the start. These **default settings** guarantee that key access and admin capabilities are ready to use out of the box. 
+ +### Default account + +| Accounts | Description | +|-----------|------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Admin** | The default administrative user in Infrahub. This user is part of the **Super Administrators** group, which gives them full system-wide permissions. | + +### Default groups + +| Group Name | Description | Assigned Role | +|------------------------|------------------------------------------------------------------------------------------------------------|-------------------------------| +| **Infrahub Users** | Standard users who have general access to Infrahub, with permission to view and interact with resources. | **General Access** | +| **Super Administrators**| Administrators with full control over the system. Users in this group have unrestricted access to all features. | **Super Administrator** | + +### Default roles + +#### General access + +This role gives standard users general permissions to view and interact with resources across the platform while restricting administrative actions. + +| Permission | Description | +|-------------------------------------------|-------------------------------------------------------------------------------------------------| +| `global:manage_repositories:allow_all` | Enables repository management for all branches. | +| `global:manage_schema:allow_all` | Permits global schema management. | +| `global:merge_proposed_change:allow_all` | Allows merging proposed changes across all branches. | +| `object:*:*:view:allow_all` | Allows seeing all objects, across all branches and namespaces. | +| `object:*:*:any:allow_other` | Permits executing any action on non-default branches for all object types. | + +#### Super administrator + +This role provides full administrative control over Infrahub. Users with this role can manage everything within the system. 
+ +| Permission | Description | +|-------------------------------------|--------------------------------------------------------------------------------------------------------------| +| `global:super_admin:allow_all` | Permits complete administrative control, including schema, permissions, users, and repositories management. | + +#### Anonymous user + +If Infrahub is setup to allow anonymous access when it is first initialized, an additional account role called **Anonymous User** will be created. This role defines all the permissions that a user will inherit when not logged in. The default configuration for this role ships with two permissions: + +| Permission | Description | +|-------------------------------------|--------------------------------------------------------------------------------------------------------------| +| `object:*:*:any:deny` | Denies anything on all objects, across all branches (this one is not required, but it is more explicit) | +| `object:*:*:view:allow_all` | llows seeing all objects, across all branches and namespaces. | + +The role is defined by its name in Infrahub's configuration and can be adjusted by changing the setting `main.anonymous_access_role` or the environment variable `INFRAHUB_ANONYMOUS_ACCESS_ROLE`. + +Note that an anonymous user will never be able to make changes to data inside Infrahub whether the role's permisssions allow it or not. + +## Authentication backends + +Infrahub supports authenticating users in a local user store or by using single sign-on through an external identity provider. + +### Local user store + +Users can be created in the local user store. Local users can be added to groups, which can have roles assigned to them. + +### Single sign-on + +Infrahub supports identity providers that support either OAuth2 or OpenID Connect (OIDC). + +Multiple identity providers can be enabled simultaneously, for example to support organizations that use different providers for different security domains. 
+ +A user that was authenticated using SSO will be created in the local user store of Infrahub and optionally automatically added to groups, which can have roles assigned to them. + +For more information on setting up single sign-on can be found in the [configuring single sign-on guide](/guides/sso). diff --git a/docs/docs/topics/graphql.mdx b/docs/docs/topics/graphql.mdx index 2a132920a5..7ee0f06853 100644 --- a/docs/docs/topics/graphql.mdx +++ b/docs/docs/topics/graphql.mdx @@ -8,17 +8,22 @@ The GraphQL interface is the main interface to interact with Infrahub. The Graph The endpoint to interact with the main branch is accessible at `https:///graphql`. To interact with a branch the URL must include the name of the branch, such as `https:///graphql/`. +If you need to extract the current GraphQL schema in your environment you can issue an HTTP get request to: + +- `https:////schema.graphql` +- `https:////schema.graphql?branch=some-other-branch` ## Query & mutations -For each model in the schema, a GraphQL query and 3 mutations will be generated based on the namespace and the name of the model. +In GraphQL, a query is used to fetch data and mutations are use to create/update or delete data. In Infrahub, a GraphQL query and 4 mutations will be generated for each model you define in the schema. The name of the query or mutation is based on the namespace and name of the model. 
For example, for the model `CoreRepository` the following query and mutations have been generated: -- `Query` : **CoreRepository** -- `Mutation` : **CoreRepositoryCreate** -- `Mutation` : **CoreRepositoryUpdate** -- `Mutation` : **CoreRepositoryDelete** +- `Query` : **CoreRepository** to fetch `CoreRepository` nodes from Infrahub +- `Mutation` : **CoreRepositoryCreate** to create a `CoreRepository` node +- `Mutation` : **CoreRepositoryUpdate** to update an existing `CoreRepository` node +- `Mutation` : **CoreRepositoryUpsert** to create or update a `CoreRepository` node +- `Mutation` : **CoreRepositoryDelete** to delete a `CoreRepository` node ### Query format @@ -26,16 +31,17 @@ The top level query for each model will always return a list of objects and the ```graphql query { - CoreRepository { # PaginatedCoreRepository object - count - edges { # EdgedCoreRepository object - node { # CoreRepository object - id - display_label - __typename - } - } + CoreRepository { # PaginatedCoreRepository object + count + edges { # EdgedCoreRepository object + node { # CoreRepository object + id + hfid + display_label + __typename + } } + } } ``` @@ -45,9 +51,13 @@ All list of objects will be nested under `edges` & `node` to make it possible to ::: -#### `ID` and `display_label` +#### `ID`, `hfid` and `display_label` + +For all nodes, the attribute `id`, `hfid` and `display_label` are automatically available. -For all nodes, the attribute `id` and `display_label` are automatically available. The value used to generate the `display_label` can be defined for each model in the schema. If no value has been provided a generic display label with the kind and the ID of the Node will be generated. +The value used to generate the `display_label` can be defined for each model in the schema. If no value has been provided a generic display label with the kind and the ID of the Node will be generated. + +The value used to generate the `hfid` can be defined for each model in the schema. 
If no value has been provided and the `model` has a single uniqueness constraint defined, then the `hfid` will be automatically generated from the uniqueness constraint. At the object level, there are mainly 3 types of resources that can be accessed, each with a different format: @@ -64,22 +74,22 @@ At the same level all the metadata of the attribute are also available example: ```graphql {6-14} title="Example query to access the value and the properties of the attribute 'name'" query { - CoreRepository { - count - edges { - node { - name { # TextAttribute object - value - is_protected - is_visible - source { - id - display_label - } - } - } + CoreRepository { + count + edges { + node { + name { # TextAttribute object + value + is_protected + is_visible + source { + id + display_label + } } + } } + } } ``` @@ -89,27 +99,28 @@ A relationship to another model with a cardinality of `One` will be represented ```graphql {6-19} title="Example query to access the peer and the properties of the relationship 'account', with a cardinality of one." query { - CoreRepository { - count - edges { - node { - account { - properties { - is_visible - is_propected - source { - id - display_label - } - } - node { - display_label - id - } - } + CoreRepository { + count + edges { + node { + account { + properties { + is_visible + is_protected + source { + id + display_label } + } + node { + display_label + hfid + id + } } + } } + } } ``` @@ -119,46 +130,48 @@ A relationship with a cardinality of `Many` will be represented with a `NestedPaginated` will be represented with a `NestedPa ```graphql {6-20} title="Example query to access the relationship 'tags', with a cardinality of Many." 
query { - CoreRepository { - count - edges { + CoreRepository { + count + edges { + node { + tags { # NestedPaginatedBuiltinTag object + count + edges { # NestedEdgedBuiltinTag object + properties { + is_protected + source { + id + } + } node { - tags { # NestedPaginatedBuiltinTag object - count - edges { # NestedEdgedBuiltinTag object - properties { - is_protected - source { - id - } - } - node { - display_label - id - } - } - } + display_label + hfid + id } + } } + } } + } } ``` ### Mutations format -The format of the mutation to `Create` and `Update` an object has some similarities with the query format. The format will be slightly different for: +The format of the mutation to `Create`, `Update` and `Upsert` an object has some similarities with the query format. The format will be slightly different for: - An `Attribute` - A relationship of `Cardinality One` - A relationship of `Cardinality Many` -#### Create and update +#### Create, update and upsert -To `Create` or `Update` an object, the mutations will have the following properties. +To `Create`, `Update` or `Upsert` an object, the mutations will have the following properties. - The input for the mutation must be provided inside `data`. - All mutations will return `ok` and `object` to access some information after the mutation has been executed. -- For `Update`, it is mandatory to provide an `id`. +- `Update` mutations require you to provide an `id` or `hfid` to identify the object you want to update. +- `Upsert` mutations do not require you to provide the `id` or the `hfid`, but enough information needs to be provided for the back-end to uniquely identify the node. Typically this means that all the attribute or relationship values need to be provided that make up the `hfid` or `uniqueness_constraints` of the node. 
```graphql mutation { @@ -166,17 +179,30 @@ mutation { data: { name: { value: "myrepop" }, # Attribute location: { value: "myrepop" }, # Attribute - account: { id: "myaccount" }, # Relationship One - tags: [ { id: "my_id" } ]} # Relationship Many + account: { hfid: ["my_account"] }, # Relationship One + tags: [ { hfid: ["my_tag"] } ]} # Relationship Many ) { ok object { id + hfid } } } ``` +#### Delete + +For a `Delete` mutation, we have to provide the `id` or the `hfid` of the node as part of the `data` argument. + +```graphql +mutation { + CoreRepositoryDelete(data: {hfid: ["myrepo"]}) { + ok + } +} +``` + ## Branch management In addition to the queries and the mutations automatically generated based on the schema, there are some queries and mutations to interact with the branches. diff --git a/docs/docs/topics/local-demo-environment.mdx b/docs/docs/topics/local-demo-environment.mdx index d96c7c2840..98dca32f9c 100644 --- a/docs/docs/topics/local-demo-environment.mdx +++ b/docs/docs/topics/local-demo-environment.mdx @@ -28,7 +28,7 @@ It's designed to be controlled by `invoke` using a list of predefined commands. | **message-queue** | rabbitmq:3.12-management | Message bus based on RabbitMQ | | **cache** | redis:7.2 | Cache based on Redis, mainly used for distributed lock | | **infrahub-server** | Dockerfile | Instance of the API server, running GraphQL | -| **infrahub-git** | Dockerfile | Instance of the Git agent, managing the Git Repository | +| **infrahub-git** | Dockerfile | Instance of the Task worker, managing the Git Repository | diff --git a/docs/docs/topics/object-storage.mdx b/docs/docs/topics/object-storage.mdx index 0676f0ab43..b39a322a01 100644 --- a/docs/docs/topics/object-storage.mdx +++ b/docs/docs/topics/object-storage.mdx @@ -15,7 +15,7 @@ At this moment Infrahub supports using local storage, or AWS S3 storage backends ### Local storage -Infrahub can use local storage as a storage backend. 
It can be any directory on a filesystem that is attached to the system on which Infrahub runs. The only requirement is that all the Infrahub API servers and Git Agents need access to the filesystem. +Infrahub can use local storage as a storage backend. It can be any directory on a filesystem that is attached to the system on which Infrahub runs. The only requirement is that all the Infrahub API servers and Task workers need access to the filesystem. To setup Infrahub to use local storage backend you can use the following configuration: diff --git a/docs/docs/topics/permissions-roles.mdx b/docs/docs/topics/permissions-roles.mdx new file mode 100644 index 0000000000..9ef256bccb --- /dev/null +++ b/docs/docs/topics/permissions-roles.mdx @@ -0,0 +1,74 @@ +--- +title: Permissions and Roles +--- + +# Roles and permissions + +Roles and permissions are essential for controlling user access and behavior in **Infrahub**. Within the platform, they **offer exact control** over what users can see, modify, or control. + +The permissions system is split into two main types: **Global** and **Object-specific**. These permissions aid in defining what users are permitted to do on particular system objects or throughout the system. + +More information on user authentication can be found in the [User management and authentication Topic](/topics/auth). + +## Overview + +Permissions fall into two categories: **Global** and **Object-specific**, while roles act as **convenient bundles** of permissions. To simplify things further, **Account Groups** let you manage permissions for multiple users at once. + +- **GlobalPermissions** gives users system-wide rights to perform specific actions. **[See full list of available global permissions](/reference/permissions.mdx#global-permissions).** +- **ObjectPermissions** are tied to individual objects within Infrahub and control what actions users can take on those objects. 
**[See full list of available object permissions](/reference/permissions.mdx#object-permissions).** +- **AccountRoles** are groups of permissions you can assign to accounts. +- **AccountGroups** allow you to manage permissions for multiple users all at once. + +Permissions are allocated to users through groups and roles. +For more detailed information on this allocation, you can check [Users permissions management Section](/topics/auth.mdx#users-permissions-management). + +## Types of permissions + +### Global permissions + +With a **GlobalPermission**, a user may act on the entire system, not just on particular objects. **A person with the authority to handle accounts, for instance, can do so globally.** +**The action is blocked** if the required permission is not granted. + +:::info Example: + +Take the `global:manage_accounts:allow_all` permission: + +- **Action**: `manage_accounts` +- **Decision**: `Allow` + +This gives the user the ability to manage all user accounts. + +::: + +### Object permissions + +**ObjectPermission** specifies actions that apply to a certain kind of object. Actions like create, update, remove, and view are supported. Depending on the kind of object or branch, object permissions may be granted or refused. + +**Key features**: + +- **Supports wildcards (`*`)** to apply permissions across multiple object types. +- **Can define different permissions per branch type (default or non-default branches).** +- **Grants or denies actions based on the assigned permission.** + +:::info Example: + +Here are some examples of object permissions and their descriptions: + +| Identifier | Object Type | Action | Decision | Description | +|-----------------------------------------------|---------------|---------|-------------|---------------------------------------------------------------------------------------------| +| `object:*:*:create:allow_other` | `*` (all types)| `create` | `allow_other`| Allows creating any object, but only on non-default branches. 
| +| `object:*:*:view:allow_all` | `*` (all types)| `view` | `allow_all` | Allows viewing any object, anywhere, across both default and non-default branches. | +| `object:Builtin:Tag:update:deny` | `BuiltinTag` | `update`| `deny` | Denies the ability to update any object of type BuiltinTag, across all branches. | +| `object:*:Generic:view:allow_all` | `*Generic` | `view` | `allow_all` | Allows viewing all objects that contain 'Generic' in their type (example: LocationGeneric, DeviceGeneric) in all namespaces, across all branches. | + +::: + +## Future developments + +The authorization structure for Infrahub is constantly changing. Here are some exciting upcoming features: + +- **Attribute-based permissions**: Grant permissions at the attribute level within objects. +- **Metadata-based permissions**: Use metadata to specify access controls. +- **Group-based permissions**: Deepen the integration of group memberships for permission assignments. + +These new features will make Infrahub's permission system even more powerful and flexible in the future. diff --git a/docs/docs/topics/repository.mdx b/docs/docs/topics/repository.mdx index f8c3322234..c9f367a809 100644 --- a/docs/docs/topics/repository.mdx +++ b/docs/docs/topics/repository.mdx @@ -23,13 +23,13 @@ See [this topic](/topics/infrahub-yml) for a full explanation of everything that ## Architecture {#architecture} -The [Infrahub web server](/reference/api-server) will never connect directly with external Git repositories. All interactions between Infrahub and remote Git repositories are handled by the [Git agent](/reference/git-agent). The Git agent(s) can work with any remote Git server that using either `git` or `http` protocols. The Infrahub web server can send commands to the Git agent via our message broker and the Git agent can send data back to the Infrahub web server via GraphQL mutations. +The [Infrahub web server](/reference/api-server) will never connect directly with external Git repositories. 
All interactions between Infrahub and remote Git repositories are handled by the [Task worker](/reference/git-agent). The Task worker(s) can work with any remote Git server that uses either `git` or `http` protocols. The Infrahub web server can send commands to the Task worker via our message broker and the Task worker can send data back to the Infrahub web server via GraphQL mutations. ![](../media/repository_architecture.excalidraw.svg) -Infrahub stores all of the data that it needs for every remote repository in a directory defined by the `git.repositories_directory` setting in `infrahub.toml`. When the Git agent receives an instruction to update a remote repository, it pulls data from the remote repositories and saves it to the filesystem in the `git.repositories_directory` directory. The Git agent then parses the new data and sends the necessary GraphQL mutations to the Infrahub web server. Infrahub attempts to update `Repository` with any changes in the remote repository several times per minute. Read-only repositories are only updated when specifically requested. +Infrahub stores all of the data that it needs for every remote repository in a directory defined by the `git.repositories_directory` setting in `infrahub.toml`. When the Task worker receives an instruction to update a remote repository, it pulls data from the remote repositories and saves it to the filesystem in the `git.repositories_directory` directory. The Task worker then parses the new data and sends the necessary GraphQL mutations to the Infrahub web server. Infrahub attempts to update `Repository` with any changes in the remote repository several times per minute. Read-only repositories are only updated when specifically requested. -Please note that each Git agent must have access to the same directory on the file system so that they can share work among each other. 
+Please note that each Task worker must have access to the same directory on the file system so that they can share work among each other. ## Read-only Repository vs. Repository {#read-only-vs-core} @@ -45,13 +45,13 @@ Updates **to** remote | When merging Proposed Change | No ### Read-only Repository {#read-only-repository} -Read-only Repositories will only pull data from an external repository into Infrahub and will never push any data to the external repository. A Read-only Repository will pull changes from a single `ref` (branch, tag, or commit) into the Infrahub branch(es) on which it exists. Read-only repositories are not automatically updated. To update a Read-only Repository, you must manually update the `ref` property to a new value, then the Git agent will pull the appropriate commit and create the appropriate objects in Infrahub. +Read-only Repositories will only pull data from an external repository into Infrahub and will never push any data to the external repository. A Read-only Repository will pull changes from a single `ref` (branch, tag, or commit) into the Infrahub branch(es) on which it exists. Read-only repositories are not automatically updated. To update a Read-only Repository, you must manually update the `ref` property to a new value, then the Task worker will pull the appropriate commit and create the appropriate objects in Infrahub. See the [guide](/guides/repository) for instructions on pulling changes from read-only repositories in Infrahub. ### Repository {#repository} -When you create a `Repository`, Infrahub will try to pull every branch defined in the external repository and create an associated Infrahub branch with the same name and matching data according to what is defined in the `.infrahub.yml` configuration file on the particular remote branch. Infrahub will attempt to sync updates from the external repository several times per minute in a background task that runs on the Git agent(s). 
+When you create a `Repository`, Infrahub will try to pull every branch defined in the external repository and create an associated Infrahub branch with the same name and matching data according to what is defined in the `.infrahub.yml` configuration file on the particular remote branch. Infrahub will attempt to sync updates from the external repository several times per minute in a background task that runs on the Task worker(s). Editing a given GraphQL Query, Transform, Artifact Definition, or Schema within Infrahub **will not** result in those changes being pushed to the external repository and **could potentially be overwritten** when Infrahub pulls new commits from the external repository. Infrahub will only push changes to an external repository when a [Proposed Change](/topics/proposed-change) is merged for which the source and destination branch are both linked to branches on the same external repository. In this case, Infrahub will attempt to create a merge commit and push that commit to the destination branch on the external repository. diff --git a/docs/docs/topics/resource-manager.mdx b/docs/docs/topics/resource-manager.mdx index 389802e46b..a7435118f2 100644 --- a/docs/docs/topics/resource-manager.mdx +++ b/docs/docs/topics/resource-manager.mdx @@ -11,7 +11,7 @@ Examples: - allocating the next available IP Prefix out of a IP Prefix pool - allocating the next available IP addresses out of a IP Address pool - allocating the next available VLAN ID out of a pool of valid VLAN ID's -- allocating the next available device ID out of a pool of valid Device ID's +- allocating the next available ASN (Autonomous System Number) out of a number pool A resource manager in Infrahub allocates resources in a branch agnostic way. When you allocate a resource out of a pool in a branch, then that allocation happens in all the branches. 
@@ -22,7 +22,4 @@ Resources can be allocated in 2 different ways: - Direct allocation: A direct allocation is typically used when you don't need the resource to be related to another node in Infrahub, or if you want establish such a relation at a later time. For example, you want to allocated an IP address out of a pool that is not going to be linked to another node in Infrahub. - Relationship resource allocation: A resource can be allocated to a relationship of a node, when you create a new node. For example, when you want to create a device and assign an IP address out of a pool to an interface at device creation time. -## Known limitations - -- We only support resource pools for IP Prefixes and IP Addresses, support for other resource pools will come in future releases -- Direct resource allocations can only happen from the GraphQL API or the Python SDK, support for allocating resources in the web UI will come in a future release. +See the [guide](/guides/resource-manager) for instructions on creating and using resource manager in Infrahub. diff --git a/docs/docs/topics/resources-testing-framework.mdx b/docs/docs/topics/resources-testing-framework.mdx index 3d9fc9295c..d6c6279705 100644 --- a/docs/docs/topics/resources-testing-framework.mdx +++ b/docs/docs/topics/resources-testing-framework.mdx @@ -138,7 +138,7 @@ In this output we can see `infrahub-sdk-0.8.1` (0.8.1 being the Infrahub SDK ver Tests can also run as part of the Infrahub CI pipeline. This is a feature which allows to validate the proper behaviour of a proposed change. -This means that the Infrahub Git agent will take care of running the `pytest` process on behalf of users after creating a proposed change or updating it with new changes. User defined tests, if found, will be run as part of the CI pipeline and be logged in checks. One check per test is created which allows to see the outcome of it and an optional message that gives more details in case of failure. 
+This means that the Infrahub Task worker will take care of running the `pytest` process on behalf of users after creating a proposed change or updating it with new changes. User defined tests, if found, will be run as part of the CI pipeline and be logged in checks. One check per test is created which allows to see the outcome of it and an optional message that gives more details in case of failure. ## How testing work diff --git a/docs/docs/topics/schema.mdx b/docs/docs/topics/schema.mdx index 5530d15819..b09c0728ab 100644 --- a/docs/docs/topics/schema.mdx +++ b/docs/docs/topics/schema.mdx @@ -91,11 +91,107 @@ The `kind` of a model is generated by concatenating the `namespace` and the `nam #### Relationship kinds -- `Generic`: Default relationship without specific significance -- `Attribute`: Relationship of type Attribute are represented in the detailed view and the list view -- `Component`: Indicate a relationship with another node that is a component of the current node. Example: Interface is a component to a Device -- `Parent`: Indicate a relationship with another node that is a parent to the current node. Example: Device is a parent to an Interface -- `Group`: Indicate a relationship to a member or a subscriber of a group +- `Generic`: A flexible relationship with no specific functional significance. It is commonly used when an entity doesn't fit into specialized categories like `Component` or `Parent`. +- `Attribute`: A relationship where related entities' attributes appear directly in the detailed view and list views. It's used for linking key information, like statuses or roles. +- `Component`: This relationship indicates that one entity is part of another and appears in a separate tab in the detailed view of a node in the UI. It represents a composition-like relationship where one node is a component of the current node. 
+- `Parent`: This relationship defines a hierarchical link, with the parent entity often serving as a container or owner of another node. **Parent relationships are mandatory** and allow filtering in the UI, such as showing all components for a given parent. +- `Group`: Defines a relationship where a node (inheriting from `CoreNode`) is a member or subscriber to a group (inheriting from `CoreGroup`). These relationships appear in the "Manage Groups" form. +- `Profile`: A special relationship where a node is assigned a profile (inheriting from `CoreProfile`), visible during creation or updates through a "select profile" dropdown. + +:::info Complementary relationship + +Component and Parent typically belong together: +The `Component` relationship is typically paired with the `Parent` relationship. +This ensures a strong relationship in both directions, where the parent node can manage its components, and the component refers back to its parent. + +::: + +:::warning Cascade deletion +Relationships of kind `Component` include an implicit `on_delete: cascade`. This means that if you delete a node with a `Component` relationship, the related nodes connected by this relationship will also be deleted. +::: + +:::info Internal Usage + +Group and Profile are **internal** relationship: +The `Group` and `Profile` relationship kinds are internal types and should not be directly used by the user in their schema. +These are automatically handled by the system for managing memberships and configurations. + +::: + +To help you understand the relationship types better, here’s an example schema using a real-world model of **Car**, **Person**, **Wheel**, and **Car Group**. + +```yaml +version: "1.0" + +nodes: + - name: Car + namespace: Auto + description: "A vehicle used for transportation." + attributes: + - name: model + kind: Text + description: "The model of the car." + - name: year + kind: Number + description: "The manufacturing year of the car." 
+ - name: license_plate + kind: Text + unique: true + description: "License plate number." + relationships: + - name: owner + peer: AutoPerson + kind: Attribute + cardinality: one + optional: false + - name: wheels + peer: AutoWheel + kind: Component + cardinality: many + + - name: Wheel + namespace: Auto + description: "A wheel of the car, a critical component for movement." + attributes: + - name: wheel_size + kind: Number + description: "Size of the wheel in inches." + - name: type + kind: Text + description: "Type of the wheel (e.g., alloy, steel)." + relationships: + # A wheel must belong to a car, hence the Parent relationship is mandatory + - name: car + peer: AutoCar + kind: Parent + cardinality: one + optional: false + + - name: Person + namespace: Auto + description: "A person who may own a car." + attributes: + - name: first_name + kind: Text + description: "First name of the person." + - name: last_name + kind: Text + description: "Last name of the person." + - name: driver_license_number + kind: Text + unique: true + description: "Driver's license number." + relationships: + - name: cars + peer: AutoCar + kind: Component + cardinality: many + optional: true +``` + +- A `Car` node might have an `owner` attribute linking it to a `Person`. This relationship will be visible in the car's detailed view. +- A `Wheel` is a component of a `Car`, meaning wheels are an essential part of the car. The wheels will be displayed in a separate "Components" tab. +- A `Car` can have a `ProfileCar` selected during its creation or update. The insurance details appear in the form where you can pick or assign a profile for the car.
Attribute kinds behavior in the UI @@ -143,18 +239,24 @@ The `kind` of a model is generated by concatenating the `namespace` and the `nam
+::: warning + +When you create a relationship of kind Component, automatically the `on_delete` property of the relationship will get set to the value `cascade`. This means that when you delete a node, all the related nodes of that relationship will also be deleted. + +::: + ### Uniqueness Constraints More complex uniqueness constraints, composed of multiple attributes and/or relationships can be defined at the Node or Generic level with the property `uniqueness_constraints`. It's possible to define multiple uniqueness constraints and each of them will be evaluated individually. -In the example below, the node schema `ExampleCar`, `["owner", "model__value"]` guarantee that a car will unique based on the `owner` and the `model` of the car. +In the example below, the node schema `ExampleCar`, `["owner", "model__value"]` guarantees that a car will be unique based on the `owner` and the `model` of the car. -`uniqueness_constraints` can be composed of a list of N number of attribute or relationship of cardinality one: +`uniqueness_constraints` can be composed of a list of N number of attributes or relationships of cardinality one: -- For an attribute, the valid format is `__value`. *Currently only value is supported but in the future the plan is to allow additional metadata to be used as well.* -- For a relationship, only the name of the relationship should be provided ``. *Only relationship of cardinality one are supported and the relationship must be mandatory.* +- For an attribute, the valid format is `__value`. *Currently only value is supported but in the future, the plan is to allow additional metadata to be used as well.* +- For a relationship, only the name of the relationship should be provided ``. 
*Only relationships of cardinality `one` are supported and the relationship must be mandatory.* ```yaml {10} showLineNumbers nodes: @@ -196,10 +298,10 @@ In the network industry: In the example below, each `ExamplePerson`, will have a `hfid` composed of his/her `lastname` and `firstname`. -`human_friendly_id` can be composed of N number of attribute or relationship of cardinality one: +`human_friendly_id` can be composed of N number of attributes or relationships of cardinality one: -- For an attribute, the valid format is `__value`. *Currently only value is supported but in the future the plan is to allow additional metadata to be used as well.* -- For a relationship, the name of the relationship and the name of a unique attribute must be provided `____value`. *Only relationship of cardinality one are supported and the relationship must be mandatory.* +- For an attribute, the valid format is `__value`. *Currently only value is supported but in the future, the plan is to allow additional metadata to be used as well.* +- For a relationship, the name of the relationship and the name of a unique attribute must be provided `____value`. *Only relationships of cardinality `one` are supported and the relationship must be mandatory.* ```yaml {4} showLineNumbers nodes: diff --git a/docs/docs/tutorials/getting-started/branches.mdx b/docs/docs/tutorials/getting-started/branches.mdx index 00eba0cf35..fec2f8d442 100644 --- a/docs/docs/tutorials/getting-started/branches.mdx +++ b/docs/docs/tutorials/getting-started/branches.mdx @@ -12,7 +12,7 @@ The default branch is called `main`. To get started, let's create a new **branch** that we'll call `cr1234`. -You can create a new branch in the frontend by using the button with a `+ sign` in the top right corner, next to the name of the current branch, i.e., `main`. +You can create a new branch in the frontend by using the button with a `+ sign` in the top left corner, next to the name of the current branch, i.e., `main`. 
Branch names are fairly permissive, but must conform to [git ref format](https://git-scm.com/docs/git-check-ref-format). For example, slashes (`/`) are allowed, tildes (`~`) are not. @@ -65,7 +65,7 @@ Branch names are fairly permissive, but must conform to [git ref format](https:/ ## Modify an organization via the UI -The name of the active branch in the top right corner should now be `cr1234`. +The name of the active branch in the top left corner should now be `cr1234`. 1. Select `Organization` under Object in the left menu (near the top). @@ -167,4 +167,4 @@ Go back to the detailed page for the Organization `my-first-tenant`. For an in-depth understanding of Infrahub's approach to handling differences between branches and merging them, please consult the [proposed change topic](/topics/proposed-change). -::: \ No newline at end of file +::: diff --git a/docs/docs/tutorials/getting-started/git-integration.mdx b/docs/docs/tutorials/getting-started/git-integration.mdx index 7098239cd3..b4c1355133 100644 --- a/docs/docs/tutorials/getting-started/git-integration.mdx +++ b/docs/docs/tutorials/getting-started/git-integration.mdx @@ -6,7 +6,7 @@ title: Integration with Git One of the three pillars Infrahub is built on is the idea of having unified storage for data and files. The data is stored in the graph database and the files are stored in Git. -When integrating a Git repository with Infrahub, the Git agent will ensure that both systems stay in sync at any time. Changes to branches or files in a Git repository will be synced to Infrahub automatically. +When integrating a Git repository with Infrahub, the Task worker will ensure that both systems stay in sync at any time. Changes to branches or files in a Git repository will be synced to Infrahub automatically. Please refer to [Repository](/topics/repository) to learn more about it. 
@@ -58,7 +58,7 @@ After adding the `infrahub-demo-edge` repository you will be able to see several :::note Troubleshooting -If you don't see additional objects under the transformations or `GraphQL Queries`, it's possible that the `Git agent` might not be running anymore. +If you don't see additional objects under the transformations or `GraphQL Queries`, it's possible that the `Task worker` might not be running anymore. In this case, you should run `invoke demo.start` first to ensure that everything is working. diff --git a/docs/docs/tutorials/getting-started/introduction-to-infrahub.mdx b/docs/docs/tutorials/getting-started/introduction-to-infrahub.mdx index 86e59711f3..d367972c57 100644 --- a/docs/docs/tutorials/getting-started/introduction-to-infrahub.mdx +++ b/docs/docs/tutorials/getting-started/introduction-to-infrahub.mdx @@ -17,7 +17,8 @@ During this tutorial we'll mainly use the Frontend, the `infrahubctl` CLI and th | **infrahubctl** | Command line utility to interact with Infrahub and manage some core objects like the branches or the schema. | `invoke demo.cli-git` | | **Frontend** | Main User interface | [http://localhost:8000](http://localhost:8000) | | **API server** | GraphQL and REST API server, primary component to interact with the data. | [http://localhost:8000/graphql](http://localhost:8000/graphql) | -| **Git agent** | Infrahub agent that manages all content hosted in Git. | --- | +| **Task manager** | Orchestrator of workflow tasks | --- | +| **Task worker** | Infrahub agent that manages all content hosted in Git. | --- | | **Git server** | External Git server like GitHub or GitLab that can host some Git repositories. | --- | | **GraphDB** | Main database based on neo4j where all information in the graph are stored. | --- | | **Cache** | Cache based on Redis. Mainly used to support the reservation of shared resources across all components. 
| --- | diff --git a/docs/docs/tutorials/getting-started/readme.mdx b/docs/docs/tutorials/getting-started/readme.mdx index 93438bbee5..15b5e61924 100644 --- a/docs/docs/tutorials/getting-started/readme.mdx +++ b/docs/docs/tutorials/getting-started/readme.mdx @@ -79,12 +79,18 @@ Refer to [User management](/topics/auth/) page for more information regarding th To follow the tutorial you should use the `admin` account but you can try the other accounts too to see how the interface behaves with different permission levels. -| name | username | password | role | -| ------------- | --------------- | ------------- | ---------- | -| Administrator | `admin` | `infrahub` | admin | -| Chloe O'Brian | `Chloe O'Brian` | `Password123` | read-write | -| David Palmer | `David Palmer` | `Password123` | read-write | -| Jack Bauer | `Jack Bauer` | `Password123` | read-only | +| name | username | password | group | +| --------------- | --------------- | ------------- | -------------------- | +| Administrator | `admin` | `infrahub` | Super Administrators | +| Sue Dough | `sudo` | `Password123` | Administrators | +| Chloe O'Brian | `cobrian` | `Password123` | Engineering Team | +| Sofia Hernandez | `shernandez` | `Password123` | Engineering Team | +| Ryan Patel | `rpatel` | `Password123` | Engineering Team | +| Jack Bauer | `jbauer` | `Password123` | Operations Team | +| Emily Lawson | `elawson` | `Password123` | Operations Team | +| Jacob Thompson | `jthompson` | `Password123` | Operations Team | +| David Palmer | `dpalmer` | `Password123` | Architecture Team | +| Olivia Carter | `ocarter` | `Password123` | Architecture Team | ## Access the Infrahub interfaces diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 47e20ad177..22ff56b098 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -67,6 +67,8 @@ const sidebars: SidebarsConfig = { 'guides/installation', 'guides/create-schema', 'guides/import-schema', + 'guides/menu', + 'guides/accounts-permissions', 'guides/groups', 
'guides/generator', 'guides/repository', @@ -102,10 +104,10 @@ const sidebars: SidebarsConfig = { 'topics/groups', 'topics/metadata', 'topics/object-storage', + 'topics/permissions-roles', 'topics/version-control', 'topics/proposed-change', 'topics/repository', - 'topics/schema', 'topics/transformation', 'topics/auth', 'topics/database-backup', @@ -139,6 +141,7 @@ const sidebars: SidebarsConfig = { 'reference/schema/validator-migration', ], }, + 'reference/menu', { type: 'category', label: 'infrahub cli', @@ -158,6 +161,7 @@ const sidebars: SidebarsConfig = { 'reference/api-server', 'reference/dotinfrahub', 'reference/infrahub-tests', + 'reference/permissions', 'reference/schema-validation' ], }, @@ -308,7 +312,9 @@ const sidebars: SidebarsConfig = { slug: 'release-notes/infrahub', }, items: [ - // 'release-notes/infrahub/release-1_0-DRAFT', + // 'release-notes/infrahub/release-1_0_1-DRAFT', + 'release-notes/infrahub/release-1_0', + 'release-notes/infrahub/release-0_16_4', 'release-notes/infrahub/release-0_16_3', 'release-notes/infrahub/release-0_16_2', 'release-notes/infrahub/release-0_16_1', diff --git a/frontend/app/biome.json b/frontend/app/biome.json index 67fe8fd929..bf337a6dc0 100644 --- a/frontend/app/biome.json +++ b/frontend/app/biome.json @@ -91,8 +91,10 @@ "suspicious": { "noArrayIndexKey": "off", "noAssignInExpressions": "off", - "noConsole": "off", - "noConsoleLog": "off", + "noConsole": { + "level": "error", + "options": { "allow": ["assert", "error", "info", "warn"] } + }, "noDoubleEquals": "off", "noEmptyBlock": "off", "noEmptyBlockStatements": "off", @@ -118,5 +120,13 @@ "attributePosition": "auto", "bracketSpacing": true } + }, + "graphql": { + "formatter": { + "enabled": true + }, + "linter": { + "enabled": true + } } } diff --git a/frontend/app/package-lock.json b/frontend/app/package-lock.json index a8405253e9..57fb9fb7da 100644 --- a/frontend/app/package-lock.json +++ b/frontend/app/package-lock.json @@ -62,6 +62,7 @@ "react-markdown": 
"^9.0.1", "react-paginate": "^8.2.0", "react-popper": "^2.3.0", + "react-resizable-panels": "^2.1.5", "react-router-dom": "^6.22.3", "react-simple-code-editor": "^0.13.1", "react-toastify": "^9.1.3", @@ -15190,6 +15191,16 @@ } } }, + "node_modules/react-resizable-panels": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-2.1.5.tgz", + "integrity": "sha512-JMSe18rYupmx+dzYcdfWYZ93ZdxqQmLum3xWDVSUMI0UVwl9bB9gUaFmPbxYoO4G+m5sqgdXQCYQxnOysytfnw==", + "license": "MIT", + "peerDependencies": { + "react": "^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc", + "react-dom": "^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + } + }, "node_modules/react-router": { "version": "6.27.0", "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.27.0.tgz", diff --git a/frontend/app/package.json b/frontend/app/package.json index a5202297ff..749d833a50 100644 --- a/frontend/app/package.json +++ b/frontend/app/package.json @@ -81,6 +81,7 @@ "react-markdown": "^9.0.1", "react-paginate": "^8.2.0", "react-popper": "^2.3.0", + "react-resizable-panels": "^2.1.5", "react-router-dom": "^6.22.3", "react-simple-code-editor": "^0.13.1", "react-toastify": "^9.1.3", diff --git a/frontend/app/src/components/account-menu.tsx b/frontend/app/src/components/account-menu.tsx index d06493e4ef..acc362f66d 100644 --- a/frontend/app/src/components/account-menu.tsx +++ b/frontend/app/src/components/account-menu.tsx @@ -91,7 +91,7 @@ const UnauthenticatedAccountMenu = () => { return ( @@ -174,7 +174,6 @@ const AuthenticatedAccountMenu = ({
{profile?.label?.value}
-
{profile?.role?.value}
{ return ( -
+
diff --git a/frontend/app/src/components/branch-selector.tsx b/frontend/app/src/components/branch-selector.tsx index dc5405898f..e7c494ac80 100644 --- a/frontend/app/src/components/branch-selector.tsx +++ b/frontend/app/src/components/branch-selector.tsx @@ -4,23 +4,28 @@ import { Branch } from "@/generated/graphql"; import { branchesState, currentBranchAtom } from "@/state/atoms/branches.atom"; import { branchesToSelectOptions } from "@/utils/branches"; import { Icon } from "@iconify-icon/react"; -import { useAtomValue } from "jotai/index"; +import { useAtomValue, useSetAtom } from "jotai"; import { useEffect, useState } from "react"; import { StringParam, useQueryParam } from "use-query-params"; import { ComboboxItem } from "@/components/ui/combobox"; -import { Command, CommandEmpty, CommandInput, CommandList } from "@/components/ui/command"; +import { Command, CommandInput, CommandItem, CommandList } from "@/components/ui/command"; import graphqlClient from "@/graphql/graphqlClientApollo"; import { useAuth } from "@/hooks/useAuth"; import { constructPath } from "@/utils/fetch"; -import { useSetAtom } from "jotai"; +import { useCommandState } from "cmdk"; import { Button, ButtonWithTooltip, LinkButton } from "./buttons/button-primitive"; import BranchCreateForm from "./form/branch-create-form"; +type DisplayForm = { + open: boolean; + defaultBranchName?: string; +}; + export default function BranchSelector() { const currentBranch = useAtomValue(currentBranchAtom); const [isOpen, setIsOpen] = useState(false); - const [displayForm, setDisplayForm] = useState(false); + const [displayForm, setDisplayForm] = useState({ open: false }); useEffect(() => { if (isOpen) graphqlClient.refetchQueries({ include: ["GetBranches"] }); @@ -30,7 +35,7 @@ export default function BranchSelector() { { - setDisplayForm(false); + setDisplayForm({ open: false }); setIsOpen(open); }} > @@ -50,15 +55,14 @@ export default function BranchSelector() { - {displayForm ? ( + {displayForm.open ? 
( { - setDisplayForm(false); - }} + onCancel={() => setDisplayForm({ open: false })} onSuccess={() => { - setDisplayForm(false); + setDisplayForm({ open: false }); setIsOpen(false); }} + defaultBranchName={displayForm.defaultBranchName} data-testid="branch-create-form" /> ) : ( @@ -74,18 +78,14 @@ function BranchSelect({ setFormOpen, }: { setPopoverOpen: (open: boolean) => void; - setFormOpen: (open: boolean) => void; + setFormOpen: (displayForm: DisplayForm) => void; }) { const branches = useAtomValue(branchesState); const setCurrentBranch = useSetAtom(currentBranchAtom); const [, setBranchInQueryString] = useQueryParam(QSP.BRANCH, StringParam); const handleBranchChange = (branch: Branch) => { - if (branch.is_default) { - setBranchInQueryString(undefined); // undefined is needed to remove a parameter from the QSP - } else { - setBranchInQueryString(branch.name); - } + setBranchInQueryString(branch.is_default ? undefined : branch.name); setCurrentBranch(branch); setPopoverOpen(false); }; @@ -110,7 +110,10 @@ function BranchSelect({
- No branch found + setFormOpen({ open: true, defaultBranchName })} + /> + {branchesToSelectOptions(branches).map((branch) => ( vo ); } -export const BranchFormTriggerButton = ({ setOpen }: { setOpen: (open: boolean) => void }) => { +export const BranchFormTriggerButton = ({ + setOpen, +}: { + setOpen: (displayForm: DisplayForm) => void; +}) => { const { isAuthenticated } = useAuth(); + const handleClick = (e: React.MouseEvent) => { + e.stopPropagation(); + setOpen({ open: true }); + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Enter") { + e.stopPropagation(); + setOpen({ open: true }); + } + }; + return ( { - if (e.key === "Enter") { - e.stopPropagation(); - setOpen(true); - } - }} - onClick={(e) => { - e.stopPropagation(); - setOpen(true); - }} + onKeyDown={handleKeyDown} + onClick={handleClick} data-testid="create-branch-button" > ); }; + +const BranchNotFound = ({ onSelect }: { onSelect: (branchName: string) => void }) => { + const filteredCount = useCommandState((state) => state.filtered.count); + const search = useCommandState((state) => state.search); + + if (filteredCount !== 0) return null; + + return ( + onSelect(search)} + className="text-neutral-600 truncate gap-1" + > + Create branch {search} + + ); +}; diff --git a/frontend/app/src/components/buttons/rounded-button.tsx b/frontend/app/src/components/buttons/rounded-button.tsx deleted file mode 100644 index d2afa2ae8d..0000000000 --- a/frontend/app/src/components/buttons/rounded-button.tsx +++ /dev/null @@ -1,74 +0,0 @@ -// type ButtonProps = {}; - -import { classNames } from "@/utils/common"; -import { forwardRef } from "react"; -import { ButtonProps } from "./button"; - -export enum BUTTON_TYPES { - DEFAULT, - VALIDATE, - CANCEL, - WARNING, -} - -const DEFAULT_CLASS = (className?: string) => ` - ${className?.includes("p-") ? 
"" : "p-2"} - inline-flex items-center gap-x-1.5 rounded-full - text-sm font-semibold - focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 - shadow-sm ring-1 ring-inset ring-gray-300 -`; - -const getClassName = (type: BUTTON_TYPES) => { - switch (type) { - case BUTTON_TYPES.VALIDATE: { - return ` - bg-green-500 text-gray-50 - hover:bg-green-400 - disabled:cursor-not-allowed disabled:bg-green-400 disabled:text-gray-100 disabled:border-slate-200 disabled:shadow-none - `; - } - case BUTTON_TYPES.CANCEL: { - return ` - bg-red-600 text-gray-50 - hover:bg-red-400 - disabled:cursor-not-allowed disabled:bg-red-400 disabled:text-gray-100 disabled:border-slate-200 disabled:shadow-none - `; - } - case BUTTON_TYPES.WARNING: { - return ` - bg-yellow-400 text-gray-800 - hover:bg-yellow-300 - disabled:cursor-not-allowed disabled:bg-yellow-200 disabled:text-gray-600 disabled:border-slate-200 disabled:shadow-none - `; - } - case BUTTON_TYPES.DEFAULT: { - return ` - bg-gray-100 text-gray-900 - hover:bg-gray-200 - disabled:cursor-not-allowed disabled:bg-slate-50 disabled:text-slate-500 disabled:border-slate-200 disabled:shadow-none - `; - } - default: { - return "disabled:cursor-not-allowed disabled:bg-slate-50 disabled:text-slate-500 disabled:border-slate-200 disabled:shadow-none"; - } - } -}; - -export const RoundedButton = forwardRef((props: any, ref) => { - const { type, className, onClick, ...propsToPass } = props; - - const customClassName = getClassName(type); - - return ( - - ); -}); diff --git a/frontend/app/src/components/buttons/select-button.tsx b/frontend/app/src/components/buttons/select-button.tsx deleted file mode 100644 index 8d1dc39d21..0000000000 --- a/frontend/app/src/components/buttons/select-button.tsx +++ /dev/null @@ -1,72 +0,0 @@ -import { classNames } from "@/utils/common"; -import { Listbox, Transition } from "@headlessui/react"; -import { Icon } from "@iconify-icon/react"; -import { Fragment } from "react"; -import { BUTTON_TYPES, 
Button } from "./button"; - -// type SelectButtonProps = {}; - -export const SelectButton = (props: any) => { - const { label, value, valueLabel, onChange, options, renderOption } = props; - - return ( - - {({ open }) => ( - <> - {label} -
-
- -
- {valueLabel} -
- - -
-
- - - - {options.map((option: any) => ( - - classNames( - active ? "text-custom-white bg-custom-blue-800" : "text-gray-900", - "cursor-pointer select-none p-4 text-sm" - ) - } - value={option} - > - {(attributes) => - renderOption({ - option, - ...attributes, - }) - } - - ))} - - -
- - )} -
- ); -}; diff --git a/frontend/app/src/components/display/accordion.tsx b/frontend/app/src/components/display/accordion.tsx index 3a402ee152..41321f5521 100644 --- a/frontend/app/src/components/display/accordion.tsx +++ b/frontend/app/src/components/display/accordion.tsx @@ -6,6 +6,7 @@ export type AccordionProps = { title?: any; children?: any; className?: string; + titleClassName?: string; iconClassName?: string; defaultOpen?: boolean; style?: CSSProperties; @@ -19,6 +20,7 @@ export default function Accordion({ className, hideChevron, iconClassName, + titleClassName, ...props }: AccordionProps) { const [isOpen, setIsOpen] = useState(); @@ -27,24 +29,20 @@ export default function Accordion({ return (
-
-
setIsOpen(!open)} +
setIsOpen(!open)}> + - - {open ? : } - + {open ? : } + - {title} -
+ {title}
+ {open && children}
); diff --git a/frontend/app/src/components/display/avatar.tsx b/frontend/app/src/components/display/avatar.tsx index 6d5c45d61b..8a612bc46c 100644 --- a/frontend/app/src/components/display/avatar.tsx +++ b/frontend/app/src/components/display/avatar.tsx @@ -1,6 +1,7 @@ import LoadingScreen from "@/screens/loading-screen/loading-screen"; import { classNames } from "@/utils/common"; import { type VariantProps, cva } from "class-variance-authority"; +import { forwardRef } from "react"; export const initials = (name: string) => name @@ -34,7 +35,7 @@ interface tAvatar extends VariantProps { isLoading?: boolean; } -export const Avatar = (props: tAvatar) => { +export const Avatar = forwardRef((props: tAvatar, ref) => { const { name, text, variant, size, className, isLoading, ...otherProps } = props; if (isLoading) { @@ -46,10 +47,14 @@ export const Avatar = (props: tAvatar) => { } return ( -
+
+ ); -}; +}); diff --git a/frontend/app/src/components/display/background.tsx b/frontend/app/src/components/display/background.tsx deleted file mode 100644 index 4719641636..0000000000 --- a/frontend/app/src/components/display/background.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import { classNames } from "@/utils/common"; -import { MouseEventHandler } from "react"; - -type tBackground = { - onClick?: MouseEventHandler; - className?: string; -}; - -export const Background = ({ onClick, className = "", ...propsToPass }: tBackground) => { - return ( -
- ); -}; diff --git a/frontend/app/src/components/display/slide-over.tsx b/frontend/app/src/components/display/slide-over.tsx index 8668312447..48b5cd517c 100644 --- a/frontend/app/src/components/display/slide-over.tsx +++ b/frontend/app/src/components/display/slide-over.tsx @@ -11,6 +11,7 @@ import ModalDelete from "../modals/modal-delete"; interface Props { open: boolean; setOpen: React.Dispatch>; + onClose?: () => void; children: React.ReactNode; title: string | React.ReactNode; offset?: number; @@ -22,8 +23,7 @@ interface SlideOverContextProps { export const SlideOverContext = React.createContext({}); -export default function SlideOver(props: Props) { - const { open, setOpen, title, offset = 0 } = props; +export default function SlideOver({ open, setOpen, onClose, title, offset = 0, children }: Props) { const initialFocusRef = useRef(null); const [preventClose, setPreventClose] = useState(false); @@ -43,7 +43,15 @@ export default function SlideOver(props: Props) { return ( - + { + setOpen(value); + if (onClose) onClose(); + }} + initialFocus={initialFocusRef} + > {title}
- {props.children} + {children}
diff --git a/frontend/app/src/components/filters/filters.tsx b/frontend/app/src/components/filters/filters.tsx index 48b9d3aad5..e8df443a82 100644 --- a/frontend/app/src/components/filters/filters.tsx +++ b/frontend/app/src/components/filters/filters.tsx @@ -47,7 +47,7 @@ export const Filters = ({ schema }: FiltersProps) => { return ( <> -
+
void; onSuccess?: (branch: Branch) => void; + defaultBranchName?: string; }; -const BranchCreateForm = ({ onCancel, onSuccess }: BranchCreateFormProps) => { +const BranchCreateForm = ({ defaultBranchName, onCancel, onSuccess }: BranchCreateFormProps) => { const [branches, setBranches] = useAtom(branchesState); const [, setBranchInQueryString] = useQueryParam(QSP.BRANCH, StringParam); const [createBranch] = useMutation(BRANCH_CREATE); @@ -60,6 +61,10 @@ const BranchCreateForm = ({ onCancel, onSuccess }: BranchCreateFormProps) => { + )}
); @@ -82,7 +82,7 @@ const ProfileSourceBadge = ({ fieldData }: { fieldData: AttributeValueFromProfil ); }; -const PoolSourceBadge = ({ fieldData }: { fieldData: RelationshipValueFormPool }) => { +const PoolSourceBadge = ({ fieldData }: { fieldData: RelationshipValueFromPool }) => { return ( options?: SelectOption[]; relationship: RelationshipSchema; schema: IModelSchema; + peerField?: string; }; export type DynamicFieldProps = @@ -128,7 +129,7 @@ export type DynamicFieldProps = export const isFormFieldValueFromPool = ( fieldData: FormFieldValue -): fieldData is RelationshipValueFormPool => fieldData.source?.type === "pool"; +): fieldData is RelationshipValueFromPool => fieldData.source?.type === "pool"; export type NumberPoolData = { id: string; diff --git a/frontend/app/src/components/form/utils/getFieldDefaultValue.ts b/frontend/app/src/components/form/utils/getFieldDefaultValue.ts index 9679b6005c..2781c381d3 100644 --- a/frontend/app/src/components/form/utils/getFieldDefaultValue.ts +++ b/frontend/app/src/components/form/utils/getFieldDefaultValue.ts @@ -1,6 +1,6 @@ import { ProfileData } from "@/components/form/object-form"; import { - AttributeValueFormPool, + AttributeValueFromPool, AttributeValueFromProfile, AttributeValueFromUser, FormAttributeValue, @@ -93,7 +93,7 @@ const getDefaultValueFromProfiles = ( const getDefaultValueFromPool = ( fieldName: string, objectData?: Record -): AttributeValueFormPool | null => { +): AttributeValueFromPool | null => { if (!objectData) return null; const currentField = objectData[fieldName]; diff --git a/frontend/app/src/components/form/utils/getFormFieldsFromSchema.ts b/frontend/app/src/components/form/utils/getFormFieldsFromSchema.ts index 6b002bcefb..3600ad12de 100644 --- a/frontend/app/src/components/form/utils/getFormFieldsFromSchema.ts +++ b/frontend/app/src/components/form/utils/getFormFieldsFromSchema.ts @@ -19,13 +19,13 @@ import { SCHEMA_ATTRIBUTE_KIND } from "@/config/constants"; import { AuthContextType } 
from "@/hooks/useAuth"; import { SchemaAttributeType } from "@/screens/edit-form-hook/dynamic-control-types"; import { store } from "@/state"; -import { genericsState, iGenericSchema, iNodeSchema, schemaState } from "@/state/atoms/schema.atom"; +import { IModelSchema, genericsState, schemaState } from "@/state/atoms/schema.atom"; import { sortByOrderWeight } from "@/utils/common"; import { AttributeType, RelationshipType } from "@/utils/getObjectItemDisplayValue"; import { getRelationshipOptions } from "@/utils/getSchemaObjectColumns"; type GetFormFieldsFromSchema = { - schema: iNodeSchema | iGenericSchema; + schema: IModelSchema; profiles?: Array; initialObject?: Record; auth?: AuthContextType; @@ -68,6 +68,7 @@ export const getFormFieldsFromSchema = ({ auth, owner: currentFieldValue?.owner, isProtected: !!currentFieldValue?.is_protected, + permissions: { update: currentFieldValue?.permissions?.update_value }, isReadOnly: attribute.read_only, }), type: attribute.kind as Exclude, diff --git a/frontend/app/src/components/form/utils/isFieldDisabled.ts b/frontend/app/src/components/form/utils/isFieldDisabled.ts index 43e9aab4d3..2e14c464d6 100644 --- a/frontend/app/src/components/form/utils/isFieldDisabled.ts +++ b/frontend/app/src/components/form/utils/isFieldDisabled.ts @@ -1,11 +1,15 @@ import { LineageOwner } from "@/generated/graphql"; import { AuthContextType } from "@/hooks/useAuth"; +import { PermissionDecisionData } from "@/screens/permission/types"; +import { store } from "@/state"; +import { currentBranchAtom } from "@/state/atoms/branches.atom"; export type IsFieldDisabledParams = { owner?: LineageOwner | null; auth?: AuthContextType; isProtected?: boolean; isReadOnly?: boolean; + permissions?: { update?: PermissionDecisionData | null }; }; export const isFieldDisabled = ({ @@ -13,12 +17,27 @@ export const isFieldDisabled = ({ auth, isProtected, isReadOnly, + permissions, }: IsFieldDisabledParams) => { - if (isReadOnly) return true; + const currentBranch = 
store.get(currentBranchAtom); - // Field is available if there is no owner and if is_protected is not set to true - if (!isProtected || !owner || auth?.permissions?.isAdmin) return false; + switch (permissions?.update) { + case "ALLOW": + return false; + case "ALLOW_DEFAULT": + return !currentBranch?.is_default; + case "ALLOW_OTHER": + return !!currentBranch?.is_default; + case "DENY": + return true; + default: { + if (isReadOnly) return true; - // Field is available only if is_protected is set to true and if the owner is the user - return owner?.id !== auth?.user?.id; + // Field is available if there is no owner and if is_protected is not set to true + if (!isProtected || !owner) return false; + + // Field is available only if is_protected is set to true and if the owner is the user + return owner?.id !== auth?.user?.id; + } + } }; diff --git a/frontend/app/src/components/form/utils/updateFormFieldValue.ts b/frontend/app/src/components/form/utils/updateFormFieldValue.ts index a334618cda..62c140cc19 100644 --- a/frontend/app/src/components/form/utils/updateFormFieldValue.ts +++ b/frontend/app/src/components/form/utils/updateFormFieldValue.ts @@ -1,15 +1,15 @@ import { PoolValue } from "@/components/form/pool-selector"; import { - AttributeValueFormPool, + AttributeValueFromPool, FormAttributeValue, FormFieldValue, FormRelationshipValue, - RelationshipValueFormPool, + RelationshipValueFromPool, } from "@/components/form/type"; import { isDeepEqual } from "remeda"; export const updateFormFieldValue = ( - newValue: Exclude["value"], + newValue: Exclude["value"], defaultValue?: FormFieldValue ): FormFieldValue => { if (defaultValue && isDeepEqual(newValue, defaultValue.value as typeof newValue)) { diff --git a/frontend/app/src/components/inputs/dropdown.tsx b/frontend/app/src/components/inputs/dropdown.tsx index 36245e107c..5eb16a2d24 100644 --- a/frontend/app/src/components/inputs/dropdown.tsx +++ b/frontend/app/src/components/inputs/dropdown.tsx @@ -27,6 +27,7 @@ 
import React, { forwardRef, HTMLAttributes, useState } from "react"; export type DropdownOption = { value: string; label: string; + badge?: string; color?: string; description?: string; }; @@ -58,10 +59,18 @@ export const DropdownItem = React.forwardRef< return ( -
- - {item.label} - +
+
+ + {item.label} + + + {item.badge && ( + + {item.badge} + + )} +

{item.description}

@@ -232,7 +241,11 @@ export const Dropdown = forwardRef( return ( - {selectItem?.label} +
+ {selectItem?.label} + + {selectItem?.badge && {selectItem?.badge}} +
diff --git a/frontend/app/src/components/inputs/select.tsx b/frontend/app/src/components/inputs/select.tsx index 272a0813ed..d7002f7928 100644 --- a/frontend/app/src/components/inputs/select.tsx +++ b/frontend/app/src/components/inputs/select.tsx @@ -80,6 +80,7 @@ export type SelectProps = { isUnique?: boolean; isInherited?: boolean; placeholder?: string; + peerField?: string; }; export const Select = forwardRef((props, ref) => { @@ -102,6 +103,7 @@ export const Select = forwardRef((props, ref) => { schema, placeholder, preventEmpty, + peerField, // Field used to build option label // eslint-disable-next-line @typescript-eslint/no-unused-vars, no-unused-vars isOptional, // Avoid proving useless props // eslint-disable-next-line @typescript-eslint/no-unused-vars, no-unused-vars @@ -154,7 +156,7 @@ export const Select = forwardRef((props, ref) => { // Query to fetch options only if a peer is defined // TODO: Find another solution for queries while loading schema const optionsQueryString = peer - ? getDropdownOptions({ kind: peer, parentFilter }) + ? getDropdownOptions({ kind: peer, parentFilter, peerField }) : "query { ok }"; const poolsQueryString = poolPeer ? getDropdownOptions({ kind: poolPeer }) : "query { ok }"; @@ -171,7 +173,7 @@ export const Select = forwardRef((props, ref) => { const loading = optionsLoading || poolsLoading; const data = hasBeenOpened ? optionsData : poolsData; - const labelQueryString = peer ? getObjectDisplayLabel({ kind: peer }) : "query { ok }"; + const labelQueryString = peer ? getObjectDisplayLabel({ kind: peer, peerField }) : "query { ok }"; const labelQuery = gql` ${labelQueryString} @@ -185,6 +187,7 @@ export const Select = forwardRef((props, ref) => { const optionsList = getOptionsFromRelationship({ options: optionsResult, schemas: schemaList, + peerField, }); const addOption: SelectOption = { @@ -799,7 +802,10 @@ export const Select = forwardRef((props, ref) => { const id = selectedOption?.id ?? value?.id ?? 
value; const { data } = await fetchLabel({ variables: { ids: [id] } }); - const label = data[peer]?.edges[0]?.node?.display_label; + const label = peerField + ? (data[peer]?.edges[0]?.node?.[peerField]?.value ?? + data[peer]?.edges[0]?.node?.[peerField]) + : data[peer]?.edges[0]?.node?.display_label; const newSelectedOption = { ...selectedOption, @@ -824,7 +830,9 @@ export const Select = forwardRef((props, ref) => { const { data } = await fetchLabel({ variables: { ids } }); const newSelectedOptions = data[peer]?.edges.map((edge) => ({ - name: edge.node.display_label, + name: peerField + ? (edge.node?.[peerField]?.value ?? edge.node?.[peerField]) + : edge.node.display_label, id: edge.node.id, })); diff --git a/frontend/app/src/components/notifications.tsx b/frontend/app/src/components/notifications.tsx index cf3dc6deb1..954a3fd2f0 100644 --- a/frontend/app/src/components/notifications.tsx +++ b/frontend/app/src/components/notifications.tsx @@ -12,7 +12,6 @@ export const Notifications = (props: any) => { `; const { data } = useSubscription(query); - console.log("data: ", data); return
OK
; }; diff --git a/frontend/app/src/components/search/search-anywhere.tsx b/frontend/app/src/components/search/search-anywhere.tsx index d6ae42d015..f69f2a367d 100644 --- a/frontend/app/src/components/search/search-anywhere.tsx +++ b/frontend/app/src/components/search/search-anywhere.tsx @@ -4,9 +4,9 @@ import { Input } from "@/components/ui/input"; import Kbd from "@/components/ui/kbd"; import { CollapsedButton } from "@/screens/layout/menu-navigation/components/collapsed-button"; import { classNames } from "@/utils/common"; -import { Combobox, Dialog, Transition } from "@headlessui/react"; +import { Combobox, Dialog } from "@headlessui/react"; import { Icon } from "@iconify-icon/react"; -import { Fragment, ReactNode, forwardRef, useEffect, useState } from "react"; +import { ReactNode, forwardRef, useEffect, useState } from "react"; import { Link, LinkProps, useNavigate } from "react-router-dom"; import { SearchActions } from "./search-actions"; import { SearchDocs } from "./search-docs"; @@ -23,9 +23,9 @@ const SearchAnywhereTriggerButton = ({ className, ...props }: ButtonProps) => { data-testid="search-anywhere-trigger" {...props} > -
+
@@ -81,54 +81,39 @@ export function SearchAnywhere({ className = "", isCollapsed }: SearchModalProps /> )} - - - -
- - -
-
- - - -
+ +
+ +
+
+
-
- +
+
); } type SearchAnywhereProps = { + className?: string; onSelection: (url?: string) => void; }; const SearchAnywhereDialog = forwardRef( - ({ onSelection }, forwardedRef) => { + ({ className, onSelection }, forwardedRef) => { const navigate = useNavigate(); const [query, setQuery] = useState(""); return ( ; - tooltipFormatter?: (value: any) => React.ReactNode; - legendFormatter?: (name: string, value?: any) => React.ReactNode; -}; -export const PieChart = ({ data, tooltipFormatter, legendFormatter }: PieChartProps) => { - return ( - - - - - ( - - {legendFormatter ? legendFormatter(name, payload?.value) : name} - - )} - /> - - - ); -}; diff --git a/frontend/app/src/components/tabs-routes.tsx b/frontend/app/src/components/tabs-routes.tsx index d405f0d643..5adcb1f07d 100644 --- a/frontend/app/src/components/tabs-routes.tsx +++ b/frontend/app/src/components/tabs-routes.tsx @@ -13,7 +13,7 @@ type TabProps = { }; function Tab({ to, label, isLoading, error, count }: TabProps) { - const match = useMatch(to); + const match = useMatch(to.split("?")[0]); return ( , - React.ComponentPropsWithoutRef ->(({ className, children, ...props }, ref) => ( + React.ComponentPropsWithoutRef & { iconClassName?: string } +>(({ className, children, iconClassName, ...props }, ref) => ( iconify-icon]:rotate-180", + "flex flex-1 items-center py-4 font-medium transition-all [&[data-state=open]>div>iconify-icon]:rotate-90", className )} {...props} > - {children} + +
+ +
)); @@ -34,12 +37,14 @@ export const AccordionTrigger = React.forwardRef< export const AccordionContent = React.forwardRef< React.ElementRef, React.ComponentPropsWithoutRef ->(({ className, children, ...props }, ref) => ( +>(({ className, children, style, ...props }, ref) => ( -
{children}
+
+ {children} +
)); diff --git a/frontend/app/src/components/ui/dropdown-menu.tsx b/frontend/app/src/components/ui/dropdown-menu.tsx index 9c9a8630ce..dd7764a15f 100644 --- a/frontend/app/src/components/ui/dropdown-menu.tsx +++ b/frontend/app/src/components/ui/dropdown-menu.tsx @@ -104,10 +104,10 @@ export const DropdownMenuSubContent = forwardRef< export const DropdownMenuAccordion = forwardRef< ElementRef, - ComponentPropsWithoutRef ->((props, ref) => { + ComponentPropsWithoutRef & { defaultOpen?: boolean } +>(({ defaultOpen, ...props }, ref) => { return ( - + ); diff --git a/frontend/app/src/components/ui/popover.tsx b/frontend/app/src/components/ui/popover.tsx index 3560b1e3da..cd8c22c2b0 100644 --- a/frontend/app/src/components/ui/popover.tsx +++ b/frontend/app/src/components/ui/popover.tsx @@ -22,6 +22,9 @@ export const PopoverContent = React.forwardRef< sideOffset={sideOffset} className={classNames( "z-10 rounded-md border p-2 bg-custom-white shadow-xl outline-none text-sm max-w-[100vw]", + "data-[state=open]:animate-in data-[state=open]:fade-in-0", + "data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95", + "data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2", className )} {...props} diff --git a/frontend/app/src/components/ui/resizable.tsx b/frontend/app/src/components/ui/resizable.tsx new file mode 100644 index 0000000000..5dab999b9d --- /dev/null +++ b/frontend/app/src/components/ui/resizable.tsx @@ -0,0 +1,39 @@ +import { classNames } from "@/utils/common"; +import React from "react"; +import * as ResizablePrimitive from "react-resizable-panels"; + +export const ResizablePanelGroup = ({ + className, + ...props +}: React.ComponentProps) => ( + +); + +export const ResizablePanel = ResizablePrimitive.Panel; + +export const ResizableHandle = ({ + withHandle, + className, + ...props +}: React.ComponentProps & { + withHandle?: 
boolean; +}) => ( + +); diff --git a/frontend/app/src/components/ui/scroll-area.tsx b/frontend/app/src/components/ui/scroll-area.tsx index 5d536bb255..e507689571 100644 --- a/frontend/app/src/components/ui/scroll-area.tsx +++ b/frontend/app/src/components/ui/scroll-area.tsx @@ -6,8 +6,11 @@ import * as React from "react"; export const ScrollArea = React.forwardRef< React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, children, ...props }, ref) => ( + React.ComponentPropsWithoutRef & { + scrollX?: boolean; + scrollY?: boolean; + } +>(({ className, children, scrollX = false, scrollY = true, ...props }, ref) => ( {children} - - + {scrollX && } + {scrollY && } )); diff --git a/frontend/app/src/config/config.ts b/frontend/app/src/config/config.ts index 5fbbd8db0a..4a48cd7cbe 100644 --- a/frontend/app/src/config/config.ts +++ b/frontend/app/src/config/config.ts @@ -52,6 +52,4 @@ export const CONFIG = { STORAGE_DETAILS_URL: (id: string) => `${INFRAHUB_API_SERVER_URL}/api/storage/object/${id}`, MENU_URL: (branch?: string) => `${INFRAHUB_API_SERVER_URL}/api/menu${branch ? `?branch=${branch}` : ""}`, - MENU_URL_OLD: (branch?: string) => - `${INFRAHUB_API_SERVER_URL}/api/menu${branch ? 
`?branch=${branch}` : ""}`, }; diff --git a/frontend/app/src/config/constants.tsx b/frontend/app/src/config/constants.tsx index e227ce13d7..80a55b9056 100644 --- a/frontend/app/src/config/constants.tsx +++ b/frontend/app/src/config/constants.tsx @@ -65,8 +65,6 @@ export const NUMBER_POOL_OBJECT = "CoreNumberPool"; export const TASK_OBJECT = "InfrahubTask"; -export const WRITE_ROLES = ["admin", "read-write"]; - export const ADMIN_ROLES = ["admin"]; export const MENU_EXCLUDELIST = [ @@ -79,7 +77,15 @@ export const MENU_EXCLUDELIST = [ "InternalRefreshToken", "CoreThreadComment", "CoreArtifactCheck", + "CoreArtifactTarget", + "CoreCheck", + "CoreComment", + "CoreGeneratorCheck", + "CoreGeneratorValidator", + "CoreNode", "CoreStandardCheck", + "CoreTaskTarget", + "CoreThread", "CoreDataCheck", "CoreFileCheck", "CoreSchemaCheck", @@ -87,6 +93,10 @@ export const MENU_EXCLUDELIST = [ "CoreDataValidator", "CoreRepositoryValidator", "CoreArtifactValidator", + "CoreUserValidator", + "CoreValidator", + "LineageOwner", + "LineageSource", ]; export const NODE_PATH_EXCLUDELIST = ["property"]; diff --git a/frontend/app/src/graphql/queries/diff/getDiff.ts b/frontend/app/src/graphql/queries/diff/getDiff.ts deleted file mode 100644 index e71922bc29..0000000000 --- a/frontend/app/src/graphql/queries/diff/getDiff.ts +++ /dev/null @@ -1,69 +0,0 @@ -import Handlebars from "handlebars"; - -export type DiffOptions = { - branch: string; - time_from?: string; - time_to?: string; - branch_only?: boolean; -}; - -export const getDiff = Handlebars.compile(` -query { - diff ({{{options}}}) { - nodes { - id - kind - changed_at - action - attributes { - id - name - changed_at - action - properties { - type - changed_at - action - value { - new - previous - __typename - } - __typename - } - __typename - } - __typename - } - files { - repository - location - action - __typename - } - relationships { - id - name - nodes { - id - kind - __typename - } - properties { - type - changed_at - action - 
value { - new - previous - __typename - } - __typename - } - changed_at - action - __typename - } - } -} -`); diff --git a/frontend/app/src/graphql/queries/groups/getGroupDetails.ts b/frontend/app/src/graphql/queries/groups/getGroupDetails.ts deleted file mode 100644 index 83946ca191..0000000000 --- a/frontend/app/src/graphql/queries/groups/getGroupDetails.ts +++ /dev/null @@ -1,41 +0,0 @@ -import Handlebars from "handlebars"; - -export const getGroupDetails = Handlebars.compile(` -query {{kind}} { - {{kind}} (ids: ["{{groupid}}"]) { - edges { - node { - id - display_label - - {{#each attributes}} - {{this.name}} { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - {{/each}} - - members { - count - } - - subscribers { - count - } - } - } - } -} -`); diff --git a/frontend/app/src/graphql/queries/objects/dropdownOptions.ts b/frontend/app/src/graphql/queries/objects/dropdownOptions.ts index bd3557db87..a0df15c9df 100644 --- a/frontend/app/src/graphql/queries/objects/dropdownOptions.ts +++ b/frontend/app/src/graphql/queries/objects/dropdownOptions.ts @@ -7,6 +7,11 @@ export const getDropdownOptions = Handlebars.compile(`query DropdownOptions { node { id display_label + {{#if peerField}} + {{peerField}}{ + value + } + {{/if}} __typename } } diff --git a/frontend/app/src/graphql/queries/objects/dropdownOptionsForRelatedPeers.ts b/frontend/app/src/graphql/queries/objects/dropdownOptionsForRelatedPeers.ts deleted file mode 100644 index 7e10148db5..0000000000 --- a/frontend/app/src/graphql/queries/objects/dropdownOptionsForRelatedPeers.ts +++ /dev/null @@ -1,25 +0,0 @@ -import Handlebars from "handlebars"; - -export interface iPeerDropdownOption { - id: string; - display_label: string; -} - -export interface iPeerDropdownOptions { - [peer: string]: iPeerDropdownOption[]; -} - -export const getDropdownOptionsForRelatedPeersPaginated = Handlebars.compile(`query 
DropdownFormOptions { - {{#each peers}} - {{this}} { - count - edges { - node { - id - display_label - __typename - } - } - } - {{/each}} -}`); diff --git a/frontend/app/src/graphql/queries/objects/getFilters.ts b/frontend/app/src/graphql/queries/objects/getFilters.ts deleted file mode 100644 index 72c441fcba..0000000000 --- a/frontend/app/src/graphql/queries/objects/getFilters.ts +++ /dev/null @@ -1,9 +0,0 @@ -import Handlebars from "handlebars"; - -export const getFilters = Handlebars.compile(`query {{kind.value}} { - {{name}} { - id - display_label - } -} -`); diff --git a/frontend/app/src/graphql/queries/objects/getObjectDisplayLabel.ts b/frontend/app/src/graphql/queries/objects/getObjectDisplayLabel.ts index 528eaefc8c..cb9f031093 100644 --- a/frontend/app/src/graphql/queries/objects/getObjectDisplayLabel.ts +++ b/frontend/app/src/graphql/queries/objects/getObjectDisplayLabel.ts @@ -7,6 +7,11 @@ query {{kind}}($ids: [ID]) { node{ id display_label + {{#if peerField}} + {{peerField}}{ + value + } + {{/if}} } } } diff --git a/frontend/app/src/graphql/queries/objects/objectTreeQuery.tsx b/frontend/app/src/graphql/queries/objects/objectTreeQuery.tsx index c487f938c7..d4f9b1f5d5 100644 --- a/frontend/app/src/graphql/queries/objects/objectTreeQuery.tsx +++ b/frontend/app/src/graphql/queries/objects/objectTreeQuery.tsx @@ -2,7 +2,11 @@ import Handlebars from "handlebars"; export const objectTopLevelTreeQuery = Handlebars.compile(` query GET_{{kind}}_TOP_LEVEL_TREE { - {{kind}}(parent__isnull: true, limit: null) { + {{kind}}( + {{#if filters}}{{{filters}}}{{/if}} + parent__isnull: true + limit: null + ) { edges { node { id diff --git a/frontend/app/src/graphql/queries/objects/updateObjectDetails.ts b/frontend/app/src/graphql/queries/objects/updateObjectDetails.ts deleted file mode 100644 index 1103d49d48..0000000000 --- a/frontend/app/src/graphql/queries/objects/updateObjectDetails.ts +++ /dev/null @@ -1,53 +0,0 @@ -import Handlebars from "handlebars"; - -export const 
updateObjectDetails = Handlebars.compile(`query {{kind.value}} { - {{name}} (ids: ["{{objectid}}"]) { - id - display_label - {{#each attributes}} - {{this.name}} { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - {{/each}} - {{#each relationships}} - {{this.name}} { - id - display_label - __typename - _relation__is_visible - _relation__is_protected - _updated_at - _relation__owner { - id - display_label - __typename - } - _relation__source { - id - display_label - __typename - } - } - {{/each}} - } - {{#each peers}} - {{this}} { - id - display_label - } - {{/each}} -} -`); diff --git a/frontend/app/src/graphql/queries/proposed-changes/getProposedChangesDetails.ts b/frontend/app/src/graphql/queries/proposed-changes/getProposedChangesDetails.ts index 41feb610e1..c12e384dac 100644 --- a/frontend/app/src/graphql/queries/proposed-changes/getProposedChangesDetails.ts +++ b/frontend/app/src/graphql/queries/proposed-changes/getProposedChangesDetails.ts @@ -1,74 +1,4 @@ import { gql } from "@apollo/client"; -import Handlebars from "handlebars"; - -export const getProposedChanges = Handlebars.compile(` -query GET_PROPOSED_CHANGES($id: ID, $nodeId: String, $state: String) { - {{kind}}(ids: [$id], state__value: $state) { - count - edges { - node { - id - display_label - __typename - _updated_at - - {{#each attributes}} - {{this.name}} { - value - } - {{/each}} - - {{#each relationships}} - {{this.name}} { - {{#if this.paginated}} - edges { - {{/if}} - node { - id - display_label - } - {{#if this.paginated}} - } - {{/if}} - } - {{/each}} - - comments{ - count - } - - created_by { - node { - id - display_label - } - } - } - } - } - - {{#if accountKind}} - - {{accountKind}} { - edges { - node { - id - display_label - } - } - } - - {{/if}} - - {{#if taskKind}} - - {{taskKind}}(related_node__ids: [$nodeId]) { - count - } - - {{/if}} -} -`); export const GET_PROPOSED_CHANGE_DETAILS 
= gql` query GET_PROPOSED_CHANGE_DETAILS($id: ID, $nodeId: String, $state: String) { diff --git a/frontend/app/src/graphql/queries/proposed-changes/getThreadsAndChecks.ts b/frontend/app/src/graphql/queries/proposed-changes/getThreadsAndChecks.ts deleted file mode 100644 index 1778e10eb5..0000000000 --- a/frontend/app/src/graphql/queries/proposed-changes/getThreadsAndChecks.ts +++ /dev/null @@ -1,46 +0,0 @@ -import Handlebars from "handlebars"; - -export const getThreadsAndChecks = Handlebars.compile(` -query getThreadsAndChecksFor{{kind}} { - {{kind}}( - change__ids: "{{id}}" - ) { - count - edges { - node { - __typename - id - object_path { - value - } - comments { - count - } - } - } - } - - CoreValidator( - proposed_change__ids: ["{{id}}"] - ) { - edges { - node { - checks { - edges { - node { - id - ... on CoreDataCheck { - conflicts { - value - } - } - __typename - } - } - } - } - } - } - -} -`); diff --git a/frontend/app/src/graphql/queries/role-management/getAccounts.ts b/frontend/app/src/graphql/queries/role-management/getAccounts.ts index d753b52e79..ad7581b0e2 100644 --- a/frontend/app/src/graphql/queries/role-management/getAccounts.ts +++ b/frontend/app/src/graphql/queries/role-management/getAccounts.ts @@ -1,8 +1,8 @@ import { gql } from "@apollo/client"; export const GET_ROLE_MANAGEMENT_ACCOUNTS = gql` - query GET_ROLE_MANAGEMENT_ACCOUNTS { - CoreGenericAccount { + query GET_ROLE_MANAGEMENT_ACCOUNTS($search: String) { + CoreGenericAccount(any__value: $search, partial_match: true) { count edges { node { diff --git a/frontend/app/src/graphql/queries/role-management/getGlobalPermissions.ts b/frontend/app/src/graphql/queries/role-management/getGlobalPermissions.ts index aa28308d2f..702afa0d4e 100644 --- a/frontend/app/src/graphql/queries/role-management/getGlobalPermissions.ts +++ b/frontend/app/src/graphql/queries/role-management/getGlobalPermissions.ts @@ -1,15 +1,13 @@ import { gql } from "@apollo/client"; export const 
GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS = gql` - query GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS { - CoreGlobalPermission { + query GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS($search: String) { + CoreGlobalPermission(any__value: $search, partial_match: true) { + count edges { node { id display_label - name { - value - } action { value } @@ -21,6 +19,7 @@ export const GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS = gql` edges { node { id + display_label } } } diff --git a/frontend/app/src/graphql/queries/role-management/getGroups.ts b/frontend/app/src/graphql/queries/role-management/getGroups.ts index 91631f9d2e..cc5e3310cc 100644 --- a/frontend/app/src/graphql/queries/role-management/getGroups.ts +++ b/frontend/app/src/graphql/queries/role-management/getGroups.ts @@ -1,8 +1,9 @@ import { gql } from "@apollo/client"; export const GET_ROLE_MANAGEMENT_GROUPS = gql` - query GET_ROLE_MANAGEMENT_GROUPS { - CoreAccountGroup { + query GET_ROLE_MANAGEMENT_GROUPS($search: String) { + CoreAccountGroup(any__value: $search, partial_match: true) { + count edges { node { id diff --git a/frontend/app/src/graphql/queries/role-management/getObjectPermissions.ts b/frontend/app/src/graphql/queries/role-management/getObjectPermissions.ts index b8a98c8b4a..781899da3d 100644 --- a/frontend/app/src/graphql/queries/role-management/getObjectPermissions.ts +++ b/frontend/app/src/graphql/queries/role-management/getObjectPermissions.ts @@ -1,8 +1,9 @@ import { gql } from "@apollo/client"; export const GET_ROLE_MANAGEMENT_OBJECT_PERMISSIONS = gql` - query GET_ROLE_MANAGEMENT_OBJECT_PERMISSIONS { - CoreObjectPermission { + query GET_ROLE_MANAGEMENT_OBJECT_PERMISSIONS($search: String) { + CoreObjectPermission(any__value: $search, partial_match: true) { + count edges { node { id @@ -24,6 +25,7 @@ export const GET_ROLE_MANAGEMENT_OBJECT_PERMISSIONS = gql` edges { node { id + display_label } } } diff --git a/frontend/app/src/graphql/queries/role-management/getRoles.ts 
b/frontend/app/src/graphql/queries/role-management/getRoles.ts index c2f2b844c5..1fee3cca7f 100644 --- a/frontend/app/src/graphql/queries/role-management/getRoles.ts +++ b/frontend/app/src/graphql/queries/role-management/getRoles.ts @@ -1,8 +1,9 @@ import { gql } from "@apollo/client"; export const GET_ROLE_MANAGEMENT_ROLES = gql` - query GET_ROLE_MANAGEMENT_ROLES { - CoreAccountRole { + query GET_ROLE_MANAGEMENT_ROLES($search: String) { + CoreAccountRole(any__value: $search, partial_match: true) { + count edges { node { id @@ -14,6 +15,7 @@ export const GET_ROLE_MANAGEMENT_ROLES = gql` edges { node { id + display_label } } } @@ -22,6 +24,10 @@ export const GET_ROLE_MANAGEMENT_ROLES = gql` edges { node { id + display_label + identifier { + value + } } } } diff --git a/frontend/app/src/graphql/utils.ts b/frontend/app/src/graphql/utils.ts index 57fe7b6a1a..30a042a0b6 100644 --- a/frontend/app/src/graphql/utils.ts +++ b/frontend/app/src/graphql/utils.ts @@ -1,8 +1,13 @@ import { SCHEMA_ATTRIBUTE_KIND } from "@/config/constants"; -import { components } from "@/infraops"; +import { AttributeSchema, RelationshipSchema } from "@/screens/schema/types"; + +type AddAttributesToRequestOptions = { + withPermissions?: boolean; +}; export const addAttributesToRequest = ( - attributes: components["schemas"]["AttributeSchema-Output"][] + attributes: Array, + { withPermissions }: AddAttributesToRequestOptions = {} ) => { return attributes.reduce((acc, attribute) => { const fragment = { @@ -23,6 +28,9 @@ export const addAttributesToRequest = ( display_label: true, __typename: true, }, + permissions: { + update_value: true, + }, }; if (attribute.kind === SCHEMA_ATTRIBUTE_KIND.DROPDOWN) { @@ -32,6 +40,15 @@ export const addAttributesToRequest = ( }; } + if (withPermissions) { + return { + ...acc, + [attribute.name]: { + ...fragment, + }, + }; + } + return { ...acc, [attribute.name]: fragment, @@ -39,9 +56,7 @@ export const addAttributesToRequest = ( }, {}); }; -export const 
addRelationshipsToRequest = ( - relationships: components["schemas"]["RelationshipSchema-Output"][] -) => { +export const addRelationshipsToRequest = (relationships: Array) => { return relationships.reduce((acc, relationship) => { const fragment = { node: { diff --git a/frontend/app/src/hooks/useAuth.tsx b/frontend/app/src/hooks/useAuth.tsx index fede9b4b09..fa02a58d41 100644 --- a/frontend/app/src/hooks/useAuth.tsx +++ b/frontend/app/src/hooks/useAuth.tsx @@ -1,20 +1,18 @@ import { ALERT_TYPES, Alert } from "@/components/ui/alert"; import { CONFIG } from "@/config/config"; -import { ADMIN_ROLES, REFRESH_TOKEN_KEY } from "@/config/constants"; +import { REFRESH_TOKEN_KEY } from "@/config/constants"; import { ACCESS_TOKEN_KEY } from "@/config/localStorage"; +import graphqlClient from "@/graphql/graphqlClientApollo"; import { components } from "@/infraops"; import { configState } from "@/state/atoms/config.atom"; import { parseJwt } from "@/utils/common"; import { fetchUrl } from "@/utils/fetch"; +import { ObservableQuery } from "@apollo/client"; import { useAtom } from "jotai/index"; import { ReactElement, ReactNode, createContext, useContext, useState } from "react"; import { Navigate, useLocation } from "react-router-dom"; import { toast } from "react-toastify"; -type PermissionsType = { - isAdmin: boolean; -}; - type User = { id: string; }; @@ -29,7 +27,6 @@ export type AuthContextType = { data?: any; isAuthenticated: boolean; isLoading: boolean; - permissions?: PermissionsType; login: (data: { username: string; password: string }, callback?: () => void) => Promise; signOut: (callback?: () => void) => void; setToken: (token: UserToken) => void; @@ -51,6 +48,12 @@ export const removeTokensInLocalStorage = () => { localStorage.removeItem(REFRESH_TOKEN_KEY); }; +const QUERY_TO_IGNORE = ["GET_PROFILE_DETAILS"]; + +const shouldIgnoreQuery = (observableQuery: ObservableQuery) => { + return !!observableQuery.queryName && 
QUERY_TO_IGNORE.includes(observableQuery.queryName); +}; + export const getNewToken = async () => { const refreshToken = localStorage.getItem(REFRESH_TOKEN_KEY); @@ -86,9 +89,6 @@ export const AuthContext = createContext({ isAuthenticated: false, isLoading: false, data: undefined, - permissions: { - isAdmin: false, - }, login: async () => {}, signOut: () => {}, setToken: () => {}, @@ -133,6 +133,12 @@ export function AuthProvider({ children }: { children: ReactNode }) { const signOut = () => { removeTokensInLocalStorage(); setAccessToken(null); + graphqlClient.refetchQueries({ + include: "active", + onQueryUpdated(observableQuery) { + return !shouldIgnoreQuery(observableQuery); + }, + }); }; const data = parseJwt(accessToken); @@ -142,9 +148,6 @@ export function AuthProvider({ children }: { children: ReactNode }) { data, isAuthenticated: !!accessToken, isLoading, - permissions: { - isAdmin: ADMIN_ROLES.includes(data?.user_claims?.role), - }, login: signIn, signOut, setToken, diff --git a/frontend/app/src/images/icons/git-icon-2.svg b/frontend/app/src/images/icons/git-icon-2.svg deleted file mode 100644 index 253a5df568..0000000000 --- a/frontend/app/src/images/icons/git-icon-2.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/frontend/app/src/images/icons/git-icon.svg b/frontend/app/src/images/icons/git-icon.svg deleted file mode 100644 index 3537783a34..0000000000 --- a/frontend/app/src/images/icons/git-icon.svg +++ /dev/null @@ -1,2 +0,0 @@ - - \ No newline at end of file diff --git a/frontend/app/src/images/icons/graphql-icon.svg b/frontend/app/src/images/icons/graphql-icon.svg deleted file mode 100644 index 44c08c2129..0000000000 --- a/frontend/app/src/images/icons/graphql-icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/frontend/app/src/images/icons/swagger-icon.svg b/frontend/app/src/images/icons/swagger-icon.svg deleted file mode 100644 index 3cdaeb116a..0000000000 --- 
a/frontend/app/src/images/icons/swagger-icon.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - file_type_swagger - - - - - - - - - \ No newline at end of file diff --git a/frontend/app/src/images/icons/unlink.png b/frontend/app/src/images/icons/unlink.png deleted file mode 100644 index 5d5e26c455..0000000000 Binary files a/frontend/app/src/images/icons/unlink.png and /dev/null differ diff --git a/frontend/app/src/images/icons/unlink.svg b/frontend/app/src/images/icons/unlink.svg deleted file mode 100644 index 5116c5f241..0000000000 --- a/frontend/app/src/images/icons/unlink.svg +++ /dev/null @@ -1 +0,0 @@ -Layer 1 \ No newline at end of file diff --git a/frontend/app/src/infraops.d.ts b/frontend/app/src/infraops.d.ts index e1254d974e..e8a4d30f6d 100644 --- a/frontend/app/src/infraops.d.ts +++ b/frontend/app/src/infraops.d.ts @@ -1144,7 +1144,7 @@ export interface components { value?: components["schemas"]["BranchDiffPropertyCollection"] | null; /** Properties */ properties?: { - [key: string]: components["schemas"]["BranchDiffPropertyCollection"] | undefined; + [key: string]: components["schemas"]["BranchDiffPropertyCollection"]; }; }; /** BranchDiffElementRelationshipMany */ @@ -1161,7 +1161,7 @@ export interface components { summary?: components["schemas"]["DiffSummary"]; /** Peers */ peers?: { - [key: string]: components["schemas"]["BranchDiffElementRelationshipManyPeer"] | undefined; + [key: string]: components["schemas"]["BranchDiffElementRelationshipManyPeer"]; }; }; /** BranchDiffElementRelationshipManyPeer */ @@ -1173,13 +1173,13 @@ export interface components { path: string; /** Properties */ properties?: { - [key: string]: components["schemas"]["BranchDiffPropertyCollection"] | undefined; + [key: string]: components["schemas"]["BranchDiffPropertyCollection"]; }; /** Changed At */ changed_at?: string | null; /** Action */ action?: { - [key: string]: components["schemas"]["DiffAction"] | undefined; + [key: string]: components["schemas"]["DiffAction"]; }; }; 
/** BranchDiffElementRelationshipOne */ @@ -1202,13 +1202,13 @@ export interface components { peer?: components["schemas"]["BranchDiffRelationshipOnePeerCollection"] | null; /** Properties */ properties?: { - [key: string]: components["schemas"]["BranchDiffPropertyCollection"] | undefined; + [key: string]: components["schemas"]["BranchDiffPropertyCollection"]; }; /** Changed At */ changed_at?: string | null; /** Action */ action?: { - [key: string]: components["schemas"]["DiffAction"] | undefined; + [key: string]: components["schemas"]["DiffAction"]; }; }; /** BranchDiffEntry */ @@ -1221,16 +1221,16 @@ export interface components { path: string; /** Elements */ elements?: { - [key: string]: components["schemas"]["BranchDiffElement"] | undefined; + [key: string]: components["schemas"]["BranchDiffElement"]; }; summary?: components["schemas"]["DiffSummary"]; /** Action */ action?: { - [key: string]: components["schemas"]["DiffAction"] | undefined; + [key: string]: components["schemas"]["DiffAction"]; }; /** Display Label */ display_label?: { - [key: string]: string | undefined; + [key: string]: string; }; }; /** BranchDiffFile */ @@ -1495,15 +1495,15 @@ export interface components { HashableModelDiff: { /** Added */ added?: { - [key: string]: (components["schemas"]["HashableModelDiff"] | null) | undefined; + [key: string]: components["schemas"]["HashableModelDiff"] | null; }; /** Changed */ changed?: { - [key: string]: (components["schemas"]["HashableModelDiff"] | null) | undefined; + [key: string]: components["schemas"]["HashableModelDiff"] | null; }; /** Removed */ removed?: { - [key: string]: (components["schemas"]["HashableModelDiff"] | null) | undefined; + [key: string]: components["schemas"]["HashableModelDiff"] | null; }; }; /** @@ -1584,6 +1584,12 @@ export interface components { * @default true */ allow_anonymous_access: boolean; + /** + * Anonymous Access Role + * @description Name of the role defining which permissions anonymous users have + * @default 
Anonymous User + */ + anonymous_access_role: string; /** * Telemetry Optout * @description Disable anonymous usage reporting @@ -1595,12 +1601,6 @@ export interface components { * @default https://telemetry.opsmill.cloud/infrahub */ telemetry_endpoint: string; - /** - * Telemetry Interval - * @description Time (in seconds) between telemetry usage push - * @default 86400 - */ - telemetry_interval: number; /** * Permission Backends * @description List of modules to handle permissions, they will be run in the given order @@ -1614,7 +1614,7 @@ export interface components { Menu: { /** Sections */ sections?: { - [key: string]: components["schemas"]["MenuItemList"][] | undefined; + [key: string]: components["schemas"]["MenuItemList"][]; }; }; /** MenuItemList */ @@ -1654,6 +1654,8 @@ export interface components { order_weight: number; /** @default object */ section: components["schemas"]["MenuSection"]; + /** Permissions */ + permissions?: string[]; /** * Children * @description Child objects @@ -1814,7 +1816,7 @@ export interface components { QueryPayload: { /** Variables */ variables?: { - [key: string]: string | undefined; + [key: string]: string; }; }; /** @@ -1992,26 +1994,26 @@ export interface components { main: string; /** Nodes */ nodes?: { - [key: string]: string | undefined; + [key: string]: string; }; /** Generics */ generics?: { - [key: string]: string | undefined; + [key: string]: string; }; }; /** SchemaDiff */ SchemaDiff: { /** Added */ added?: { - [key: string]: components["schemas"]["HashableModelDiff"] | undefined; + [key: string]: components["schemas"]["HashableModelDiff"]; }; /** Changed */ changed?: { - [key: string]: components["schemas"]["HashableModelDiff"] | undefined; + [key: string]: components["schemas"]["HashableModelDiff"]; }; /** Removed */ removed?: { - [key: string]: components["schemas"]["HashableModelDiff"] | undefined; + [key: string]: components["schemas"]["HashableModelDiff"]; }; }; /** SchemaExtension */ @@ -2387,11 +2389,9 @@ 
export interface operations { }; content: { "application/json": { - [key: string]: - | { - [key: string]: components["schemas"]["BranchDiffRepository"] | undefined; - } - | undefined; + [key: string]: { + [key: string]: components["schemas"]["BranchDiffRepository"]; + }; }; }; }; @@ -2428,7 +2428,7 @@ export interface operations { }; content: { "application/json": { - [key: string]: components["schemas"]["BranchDiffArtifact"] | undefined; + [key: string]: components["schemas"]["BranchDiffArtifact"]; }; }; }; diff --git a/frontend/app/src/pages/ipam/layout.tsx b/frontend/app/src/pages/ipam/layout.tsx index 7a214b0c06..f408cbc9fe 100644 --- a/frontend/app/src/pages/ipam/layout.tsx +++ b/frontend/app/src/pages/ipam/layout.tsx @@ -13,7 +13,7 @@ function IpamLayout() {
- +
diff --git a/frontend/app/src/pages/objects/layout.tsx b/frontend/app/src/pages/objects/layout.tsx index 0a30f98b40..c961002e67 100644 --- a/frontend/app/src/pages/objects/layout.tsx +++ b/frontend/app/src/pages/objects/layout.tsx @@ -1,3 +1,4 @@ +import { ResizableHandle, ResizablePanel, ResizablePanelGroup } from "@/components/ui/resizable"; import { ScrollArea } from "@/components/ui/scroll-area"; import NoDataFound from "@/screens/errors/no-data-found"; import Content from "@/screens/layout/content"; @@ -31,39 +32,56 @@ const ObjectPageLayout = () => { const isHierarchicalModel = "hierarchical" in schema && schema.hierarchical; const inheritFormHierarchicalModel = "hierarchy" in schema && schema.hierarchy; - const getTreeSchema = () => { - if (isHierarchicalModel) { - return schema; - } + if (isHierarchicalModel || inheritFormHierarchicalModel) { + const getTreeSchema = () => { + if (isHierarchicalModel) { + return schema; + } - if (inheritFormHierarchicalModel) { - return generics.find(({ kind }) => kind === schema.hierarchy); - } + if (inheritFormHierarchicalModel) { + return generics.find(({ kind }) => kind === schema.hierarchy); + } - return null; - }; + return null; + }; - const treeSchema = getTreeSchema(); + const treeSchema = getTreeSchema(); + + return ( + + + + + {treeSchema && ( + <> + + + + + + + + )} + + +
+ +
+
+
+
+ ); + } return ( -
- {treeSchema && ( - - - - )} - -
- -
-
+
); }; diff --git a/frontend/app/src/pages/role-management/index.tsx b/frontend/app/src/pages/role-management/index.tsx index e2a14c754e..82b3e28e63 100644 --- a/frontend/app/src/pages/role-management/index.tsx +++ b/frontend/app/src/pages/role-management/index.tsx @@ -1,12 +1,35 @@ +import { GLOBAL_PERMISSION_OBJECT } from "@/config/constants"; +import useQuery from "@/hooks/useQuery"; +import ErrorScreen from "@/screens/errors/error-screen"; +import UnauthorizedScreen from "@/screens/errors/unauthorized-screen"; import Content from "@/screens/layout/content"; +import LoadingScreen from "@/screens/loading-screen/loading-screen"; +import { getObjectPermissionsQuery } from "@/screens/permission/queries/getObjectPermissions"; import { RoleManagementNavigation } from "@/screens/role-management"; +import { gql } from "@apollo/client"; import { Outlet } from "react-router-dom"; function RoleManagement() { + const { loading, error } = useQuery(gql(getObjectPermissionsQuery(GLOBAL_PERMISSION_OBJECT))); + + if (loading) { + return ; + } + + if (error) { + if (error.networkError?.statusCode === 403) { + const { message } = error.networkError?.result?.errors?.[0] ?? 
{}; + + return ; + } + + return ; + } + return ( diff --git a/frontend/app/src/router.tsx b/frontend/app/src/router.tsx index 1be94fde3f..ae7dee2ef6 100644 --- a/frontend/app/src/router.tsx +++ b/frontend/app/src/router.tsx @@ -367,7 +367,7 @@ export const router = createBrowserRouter([ breadcrumb: () => { return { type: "link", - label: "Role Management", + label: "Users & Permissions", to: constructPath("/role-management"), }; }, diff --git a/frontend/app/src/screens/diff/checks/validator-checks-counter.tsx b/frontend/app/src/screens/diff/checks/validator-checks-counter.tsx deleted file mode 100644 index 00782dd8d0..0000000000 --- a/frontend/app/src/screens/diff/checks/validator-checks-counter.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import { getChecksStats } from "@/utils/checks"; - -type tValidatorChecksCounterProps = { - checks: any[]; -}; - -export const ValidatorChecksCounter = (props: tValidatorChecksCounterProps) => { - const { checks } = props; - - const checksStats = getChecksStats(checks); - - const isEmpty = !Object.values(checksStats).filter(Boolean).length; - - if (isEmpty) { - return ( -
-
0
-
- ); - } - - return
ok
; -}; diff --git a/frontend/app/src/screens/diff/checks/validator-checks-progress.tsx b/frontend/app/src/screens/diff/checks/validator-checks-progress.tsx deleted file mode 100644 index f6727da699..0000000000 --- a/frontend/app/src/screens/diff/checks/validator-checks-progress.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import { getChecksStats } from "@/utils/checks"; - -type tValidatorChecksProgressProps = { - checks: any[]; -}; - -const getCheckBar = (type: string, amount: number, total: number, index: number) => { - const precentage = Math.floor((amount / total) * 100); - - switch (type) { - case "total": { - return null; - } - case "success": { - return ( -
- {amount} -
- ); - } - case "info": { - return ( -
- {amount} -
- ); - } - case "warning": { - return ( -
- {amount} -
- ); - } - case "error": { - return ( -
- {amount} -
- ); - } - case "critical": { - return ( -
- {amount} -
- ); - } - default: { - return ( -
- {amount} -
- ); - } - } -}; - -export const ValidatorChecksProgress = (props: tValidatorChecksProgressProps) => { - const { checks } = props; - - const checksStats = getChecksStats(checks); - - const isEmpty = !Object.values(checksStats).filter(Boolean).length; - - if (isEmpty) { - return ( -
-
0
-
- ); - } - - return ( -
- {Object.entries(checksStats).map(([type, amount], index) => - getCheckBar(type, amount, checksStats.total, index) - )} -
- ); -}; diff --git a/frontend/app/src/screens/errors/unauthorized-screen.tsx b/frontend/app/src/screens/errors/unauthorized-screen.tsx index 105bbbe0e6..eddd97227f 100644 --- a/frontend/app/src/screens/errors/unauthorized-screen.tsx +++ b/frontend/app/src/screens/errors/unauthorized-screen.tsx @@ -1,3 +1,4 @@ +import Accordion from "@/components/display/accordion"; import { classNames } from "@/utils/common"; import { Icon } from "@iconify-icon/react"; import { ReactElement } from "react"; @@ -6,20 +7,26 @@ type tUnauthorized = { className?: string; message?: string; icon?: ReactElement; - hideIcon?: boolean; }; const DEFAULT_MESSAGE = "Sorry, you are not authorized to access this view."; -export default function UnauthorizedScreen({ className, message, icon, hideIcon }: tUnauthorized) { +export default function UnauthorizedScreen({ className, message, icon }: tUnauthorized) { return (
- {!hideIcon && ( -
- {icon || } -
+ {icon || ( + )} -
{message ?? DEFAULT_MESSAGE}
+ + +
{message ?? DEFAULT_MESSAGE}
+
); } diff --git a/frontend/app/src/screens/groups/groups-auto-generated-filter-button.tsx b/frontend/app/src/screens/groups/groups-auto-generated-filter-button.tsx new file mode 100644 index 0000000000..f9d677b35b --- /dev/null +++ b/frontend/app/src/screens/groups/groups-auto-generated-filter-button.tsx @@ -0,0 +1,48 @@ +import { Button, ButtonProps } from "@/components/buttons/button-primitive"; +import useFilters, { Filter } from "@/hooks/useFilters"; +import { classNames } from "@/utils/common"; +import { Icon } from "@iconify-icon/react"; +import { useEffect, useRef } from "react"; + +export const HIDE_AUTO_GENERATED_FILTER: Filter = { name: "group_type__value", value: "default" }; + +export const GroupsAutoGeneratedFilterButton = ({ className, ...props }: ButtonProps) => { + const [filters, setFilters] = useFilters(); + const isInitialMount = useRef(true); + + const hasAutoGeneratedFiltered = filters.some( + (filter) => + filter.name === HIDE_AUTO_GENERATED_FILTER.name && + filter.value === HIDE_AUTO_GENERATED_FILTER.value + ); + + useEffect(() => { + if (isInitialMount.current) { + isInitialMount.current = false; + if (filters.length === 0) { + setFilters([HIDE_AUTO_GENERATED_FILTER]); + } + } + }, [filters, setFilters]); + + const handleClick = () => { + if (hasAutoGeneratedFiltered) { + setFilters(filters.filter((filter) => filter.name !== HIDE_AUTO_GENERATED_FILTER.name)); + } else { + setFilters([...filters, HIDE_AUTO_GENERATED_FILTER]); + } + }; + + return ( + + ); +}; diff --git a/frontend/app/src/screens/layout/menu-navigation/components/menu-section-internal.tsx b/frontend/app/src/screens/layout/menu-navigation/components/menu-section-internal.tsx index 2318690ec2..b613adf53a 100644 --- a/frontend/app/src/screens/layout/menu-navigation/components/menu-section-internal.tsx +++ b/frontend/app/src/screens/layout/menu-navigation/components/menu-section-internal.tsx @@ -52,6 +52,10 @@ const ExpandedMenuItemLink: React.FC<{ item: MenuItem }> = ({ item }) 
=> ( {item.label} + ); diff --git a/frontend/app/src/screens/layout/menu-navigation/components/menu-section-object.tsx b/frontend/app/src/screens/layout/menu-navigation/components/menu-section-object.tsx index 2865f3bbe1..74bcd50596 100644 --- a/frontend/app/src/screens/layout/menu-navigation/components/menu-section-object.tsx +++ b/frontend/app/src/screens/layout/menu-navigation/components/menu-section-object.tsx @@ -34,14 +34,9 @@ const RecursiveObjectMenuItem: React.FC<{ isCollapsed?: boolean; level?: number; }> = ({ item, isCollapsed, level = 0 }) => { - const commonStyleProps = { - className: menuNavigationItemStyle, - style: { marginLeft: level * 20 }, - }; - if (!item.children?.length) { return ( - + {item.label} @@ -51,11 +46,28 @@ const RecursiveObjectMenuItem: React.FC<{ } return ( - - - {item.path ? {item.label} : item.label} + + + + {item.path ? ( + + {item.label} + + ) : ( + item.label + )} - + + {item.children.map((child) => (

{item.label}

{item.children.map((child) => ( diff --git a/frontend/app/src/screens/layout/menu-navigation/menu-navigation.tsx b/frontend/app/src/screens/layout/menu-navigation/menu-navigation.tsx index c4162182f1..e37a4530a5 100644 --- a/frontend/app/src/screens/layout/menu-navigation/menu-navigation.tsx +++ b/frontend/app/src/screens/layout/menu-navigation/menu-navigation.tsx @@ -2,10 +2,11 @@ import { ALERT_TYPES, Alert } from "@/components/ui/alert"; import { Divider } from "@/components/ui/divider"; import { ScrollArea } from "@/components/ui/scroll-area"; import { CONFIG } from "@/config/config"; +import { useAuth } from "@/hooks/useAuth"; import { MenuSectionInternal } from "@/screens/layout/menu-navigation/components/menu-section-internal"; import { MenuSectionObject } from "@/screens/layout/menu-navigation/components/menu-section-object"; import { currentBranchAtom } from "@/state/atoms/branches.atom"; -import { currentSchemaHashAtom, menuAtom } from "@/state/atoms/schema.atom"; +import { menuAtom } from "@/state/atoms/schema.atom"; import { fetchUrl } from "@/utils/fetch"; import { useAtom, useAtomValue } from "jotai"; import { useEffect, useState } from "react"; @@ -16,24 +17,28 @@ export interface MenuNavigationProps { } export default function MenuNavigation({ isCollapsed }: MenuNavigationProps) { + const { accessToken } = useAuth(); const currentBranch = useAtomValue(currentBranchAtom); - const currentSchemaHash = useAtomValue(currentSchemaHashAtom); const [menu, setMenu] = useAtom(menuAtom); const [isLoading, setIsLoading] = useState(false); useEffect(() => { - if (!currentSchemaHash) return; + if (!currentBranch) return; + + const headers = accessToken && { + authorization: `Bearer ${accessToken}`, + }; try { setIsLoading(true); - fetchUrl(CONFIG.MENU_URL(currentBranch?.name)).then((menu) => setMenu(menu)); + fetchUrl(CONFIG.MENU_URL(currentBranch?.name), { headers }).then((menu) => setMenu(menu)); } catch (error) { console.error("error: ", error); toast(); } 
finally { setIsLoading(false); } - }, [currentSchemaHash]); + }, [currentBranch, accessToken]); if (isLoading) return
Loading...
; if (!menu?.sections) return
; diff --git a/frontend/app/src/screens/layout/menu-navigation/styles.tsx b/frontend/app/src/screens/layout/menu-navigation/styles.tsx index 744acbf33e..57ba3b5f19 100644 --- a/frontend/app/src/screens/layout/menu-navigation/styles.tsx +++ b/frontend/app/src/screens/layout/menu-navigation/styles.tsx @@ -1,2 +1,2 @@ export const menuNavigationItemStyle = - "flex items-center outline-none gap-2 px-3 py-2 rounded font-medium text-neutral-900 hover:bg-neutral-100 focus:bg-neutral-100 group data-[state=open]:bg-indigo-50 data-[state=open]:text-indigo-700"; + "flex items-center outline-none gap-2 p-2 rounded font-medium text-neutral-900 hover:bg-neutral-100 focus:bg-neutral-100 group data-[state=open]:bg-indigo-50 data-[state=open]:text-indigo-700"; diff --git a/frontend/app/src/screens/layout/navigation-list.tsx b/frontend/app/src/screens/layout/navigation-list.tsx deleted file mode 100644 index a35073d9e8..0000000000 --- a/frontend/app/src/screens/layout/navigation-list.tsx +++ /dev/null @@ -1,66 +0,0 @@ -import { - BoltIcon, - ChartBarIcon, - CpuChipIcon, - LinkIcon, - ListBulletIcon, - WifiIcon, -} from "@heroicons/react/24/outline"; - -export const navigation = [ - { name: "Dashboard", icon: ListBulletIcon, current: true, href: "#" }, - { - name: "Connections", - icon: LinkIcon, - current: false, - children: [ - { name: "Cables", href: "#" }, - { name: "Wireless Links", href: "#" }, - { name: "Interface Connections", href: "#" }, - { name: "Console Connections", href: "#" }, - { name: "Power Connections", href: "#" }, - ], - }, - { - name: "Wireless", - icon: WifiIcon, - current: false, - children: [ - { name: "Wireless LANs", href: "#" }, - { name: "Wireless LAN Groups", href: "#" }, - ], - }, - { - name: "Power", - icon: BoltIcon, - current: false, - children: [ - { name: "Power Feeds", href: "#" }, - { name: "Power Panels", href: "#" }, - ], - }, - { - name: "Virtualization", - icon: CpuChipIcon, - current: false, - children: [ - { name: "Virtual Machines", href: 
"#" }, - { name: "Interfaces", href: "#" }, - { name: "Clusters", href: "#" }, - { name: "Cluster Types", href: "#" }, - { name: "Cluster Groups", href: "#" }, - ], - }, - { - name: "Reports", - icon: ChartBarIcon, - current: false, - children: [ - { name: "Overview", href: "#" }, - { name: "Devices", href: "#" }, - { name: "Settings", href: "#" }, - ], - }, -]; - -export const userNavigation = [{ name: "Your Profile", href: "/profile" }]; diff --git a/frontend/app/src/screens/object-item-edit/generateObjectEditFormQuery.ts b/frontend/app/src/screens/object-item-edit/generateObjectEditFormQuery.ts index 9508e22d6e..51019f12e1 100644 --- a/frontend/app/src/screens/object-item-edit/generateObjectEditFormQuery.ts +++ b/frontend/app/src/screens/object-item-edit/generateObjectEditFormQuery.ts @@ -6,11 +6,9 @@ import { jsonToGraphQLQuery } from "json-to-graphql-query"; export const generateObjectEditFormQuery = ({ schema, objectId, - withProfiles, }: { schema: iNodeSchema | IProfileSchema; objectId: string; - withProfiles?: boolean; }): string => { const request = { query: { @@ -23,7 +21,7 @@ export const generateObjectEditFormQuery = ({ node: { id: true, display_label: true, - ...addAttributesToRequest(schema.attributes ?? []), + ...addAttributesToRequest(schema.attributes ?? [], { withPermissions: true }), ...addRelationshipsToRequest(getRelationshipsForForm(schema.relationships ?? [], true)), ...("generate_profile" in schema && schema.generate_profile ? 
{ diff --git a/frontend/app/src/screens/object-items/object-items-paginated.tsx b/frontend/app/src/screens/object-items/object-items-paginated.tsx index c68d337b01..3d08588c4c 100644 --- a/frontend/app/src/screens/object-items/object-items-paginated.tsx +++ b/frontend/app/src/screens/object-items/object-items-paginated.tsx @@ -15,8 +15,10 @@ import { useObjectItems } from "@/hooks/useObjectItems"; import { useTitle } from "@/hooks/useTitle"; import ErrorScreen from "@/screens/errors/error-screen"; import NoDataFound from "@/screens/errors/no-data-found"; +import { GroupsAutoGeneratedFilterButton } from "@/screens/groups/groups-auto-generated-filter-button"; import LoadingScreen from "@/screens/loading-screen/loading-screen"; import { ObjectItemsCell, TextCell } from "@/screens/object-items/object-items-cell"; +import { isOfKind } from "@/screens/schema/utils"; import { IModelSchema } from "@/state/atoms/schema.atom"; import { classNames, debounce } from "@/utils/common"; import { getDisplayValue } from "@/utils/getObjectItemDisplayValue"; @@ -117,7 +119,7 @@ export default function ObjectItems({ return ( <>
-
+
+ {isOfKind("CoreGroup", schema) && } +
diff --git a/frontend/app/src/screens/objects/hierarchical-tree.tsx b/frontend/app/src/screens/objects/hierarchical-tree.tsx index 998c4abd6a..ee1b3a78a3 100644 --- a/frontend/app/src/screens/objects/hierarchical-tree.tsx +++ b/frontend/app/src/screens/objects/hierarchical-tree.tsx @@ -4,7 +4,9 @@ import { objectChildrenQuery, objectTopLevelTreeQuery, } from "@/graphql/queries/objects/objectTreeQuery"; +import useFilters from "@/hooks/useFilters"; import { useLazyQuery } from "@/hooks/useQuery"; +import { HIDE_AUTO_GENERATED_FILTER } from "@/screens/groups/groups-auto-generated-filter-button"; import { TREE_ROOT_ID } from "@/screens/ipam/constants"; import { EMPTY_TREE, PrefixNode, updateTreeData } from "@/screens/ipam/ipam-tree/utils"; import { currentBranchAtom } from "@/state/atoms/branches.atom"; @@ -29,12 +31,25 @@ export const HierarchicalTree = ({ schema, currentNodeId, className }: Hierarchi const navigate = useNavigate(); const currentBranch = useAtomValue(currentBranchAtom); const currentDate = useAtomValue(datetimeAtom); + const [filters] = useFilters(); + const hasAutoGeneratedFiltered = filters.some( + (filter) => + filter.name === HIDE_AUTO_GENERATED_FILTER.name && + filter.value === HIDE_AUTO_GENERATED_FILTER.value + ); const [treeData, setTreeData] = useState(EMPTY_TREE); const [expandedIds, setExpandedIds] = useState([]); const [selectedIds, setSelectedIds] = useState([]); - const [getObjectTopLevelTree] = useLazyQuery(gql(objectTopLevelTreeQuery({ kind: schema.kind }))); + const [getObjectTopLevelTree] = useLazyQuery( + gql( + objectTopLevelTreeQuery({ + kind: schema.kind, + filters: hasAutoGeneratedFiltered ? 
`group_type__value: "default"` : undefined, + }) + ) + ); const [getObjectAncestors] = useLazyQuery(gql(objectAncestorsQuery({ kind: schema.kind }))); const [getTreeItemChildren] = useLazyQuery(gql(objectChildrenQuery({ kind: schema.kind }))); const [isLoading, setLoading] = useState(true); @@ -98,7 +113,10 @@ export const HierarchicalTree = ({ schema, currentNodeId, className }: Hierarchi setLoading(true); setSelectedIds([]); setExpandedIds([]); + + let isCancelled = false; fetchTree().then((tree) => { + if (isCancelled) return; if (!tree) return; setLoading(false); @@ -107,7 +125,11 @@ export const HierarchicalTree = ({ schema, currentNodeId, className }: Hierarchi setSelectedIds([currentNodeId]); } }); - }, [schema.kind, currentBranch, currentDate]); + + return () => { + isCancelled = true; + }; + }, [schema.kind, currentBranch, currentDate, hasAutoGeneratedFiltered]); const onLoadData = async ({ element }: ITreeViewOnLoadDataProps) => { if (!element.isBranch || element.children.length > 0) return; // To avoid refetching data diff --git a/frontend/app/src/screens/permission/types.ts b/frontend/app/src/screens/permission/types.ts index c7e119d13c..73d3ffc337 100644 --- a/frontend/app/src/screens/permission/types.ts +++ b/frontend/app/src/screens/permission/types.ts @@ -1,4 +1,4 @@ -export type PermissionDecisionData = "DENY" | "ALLOW_ALL" | "ALLOW_DEFAULT" | "ALLOW_OTHER"; +export type PermissionDecisionData = "ALLOW" | "ALLOW_DEFAULT" | "ALLOW_OTHER" | "DENY"; export type PermissionAction = "view" | "create" | "update" | "delete"; diff --git a/frontend/app/src/screens/permission/utils.ts b/frontend/app/src/screens/permission/utils.ts index a7d313a092..a906f681be 100644 --- a/frontend/app/src/screens/permission/utils.ts +++ b/frontend/app/src/screens/permission/utils.ts @@ -6,26 +6,9 @@ import { PermissionDecisionData, } from "@/screens/permission/types"; import { store } from "@/state"; -import { currentBranchAtom } from "@/state/atoms/branches.atom"; import { 
configState } from "@/state/atoms/config.atom"; import { warnUnexpectedType } from "@/utils/common"; -const isActionAllowedOnBranch = ( - decision: PermissionDecisionData, - isOnDefaultBranch: boolean -): boolean => { - switch (decision) { - case "ALLOW_ALL": - return true; - case "ALLOW_DEFAULT": - return isOnDefaultBranch; - case "ALLOW_OTHER": - return !isOnDefaultBranch; - default: - return false; - } -}; - const getMessage = (action: string, decision?: PermissionDecisionData): string => { if (!decision) return `Unable to determine permission to ${action} this object. Please contact your administrator.`; @@ -37,7 +20,7 @@ const getMessage = (action: string, decision?: PermissionDecisionData): string = return `This action is only allowed on the default branch. Please switch to the default branch to ${action} this object.`; case "ALLOW_OTHER": return `This action is not allowed on the default branch. Please switch to a different branch to ${action} this object.`; - case "ALLOW_ALL": + case "ALLOW": return `You have permission to ${action} this object on any branch.`; default: warnUnexpectedType(decision); @@ -49,27 +32,22 @@ export function getPermission(permission?: Array<{ node: PermissionData }>): Per if (!Array.isArray(permission)) return PERMISSION_ALLOW_ALL; const config = store.get(configState); - const currentBranch = store.get(currentBranchAtom); - const isOnDefaultBranch = !!currentBranch?.is_default; const createPermissionAction = (action: PermissionAction): PermissionDecision => { if (action === "view" && config?.main.allow_anonymous_access) return { isAllowed: true }; - const permissionAllowNode = permission.find(({ node }) => - isActionAllowedOnBranch(node[action], isOnDefaultBranch) - ); + const permissionAllowNode = permission.find(({ node }) => node[action] === "ALLOW"); if (permissionAllowNode) { return { isAllowed: true }; - } else { - const permissionDeniedNode = permission.find( - ({ node }) => !isActionAllowedOnBranch(node[action], 
isOnDefaultBranch) - ); - return { - isAllowed: false, - message: getMessage(action, permissionDeniedNode?.node?.[action]), - }; } + + const permissionDeniedNode = permission.find(({ node }) => node[action] !== "ALLOW"); + + return { + isAllowed: false, + message: getMessage(action, permissionDeniedNode?.node?.[action]), + }; }; return { diff --git a/frontend/app/src/screens/role-management/account-form.tsx b/frontend/app/src/screens/role-management/account-form.tsx index 348edce3bf..4fde745ce0 100644 --- a/frontend/app/src/screens/role-management/account-form.tsx +++ b/frontend/app/src/screens/role-management/account-form.tsx @@ -115,17 +115,19 @@ export const AccountForm = ({ }} /> - + {!currentObject && ( + + )} diff --git a/frontend/app/src/screens/role-management/account-role-form.tsx b/frontend/app/src/screens/role-management/account-role-form.tsx index 2ae874dfec..76e19e99a5 100644 --- a/frontend/app/src/screens/role-management/account-role-form.tsx +++ b/frontend/app/src/screens/role-management/account-role-form.tsx @@ -140,6 +140,7 @@ export const AccountRoleForm = ({ cardinality: "many", }} options={permissions.value} + peerField="identifier" />
diff --git a/frontend/app/src/screens/role-management/accounts.tsx b/frontend/app/src/screens/role-management/accounts.tsx index 0fcae8229f..c1b86ba39e 100644 --- a/frontend/app/src/screens/role-management/accounts.tsx +++ b/frontend/app/src/screens/role-management/accounts.tsx @@ -1,26 +1,43 @@ import { Button } from "@/components/buttons/button-primitive"; import { ColorDisplay } from "@/components/display/color-display"; -import { Pill } from "@/components/display/pill"; import SlideOver, { SlideOverTitle } from "@/components/display/slide-over"; import ObjectForm from "@/components/form/object-form"; import ModalDeleteObject from "@/components/modals/modal-delete-object"; import { Table, tRowValue } from "@/components/table/table"; import { Pagination } from "@/components/ui/pagination"; +import { SearchInput } from "@/components/ui/search-input"; import { ACCOUNT_GENERIC_OBJECT, ACCOUNT_OBJECT } from "@/config/constants"; import graphqlClient from "@/graphql/graphqlClientApollo"; import { GET_ROLE_MANAGEMENT_ACCOUNTS } from "@/graphql/queries/role-management/getAccounts"; +import { useDebounce } from "@/hooks/useDebounce"; import useQuery from "@/hooks/useQuery"; import { useSchema } from "@/hooks/useSchema"; import { schemaKindNameState } from "@/state/atoms/schemaKindName.atom"; +import { NetworkStatus } from "@apollo/client"; import { useAtomValue } from "jotai"; import { useState } from "react"; import ErrorScreen from "../errors/error-screen"; import UnauthorizedScreen from "../errors/unauthorized-screen"; import LoadingScreen from "../loading-screen/loading-screen"; import { getPermission } from "../permission/utils"; +import { RelationshipDisplay } from "./relationship-display"; function Accounts() { - const { loading, data, error, refetch } = useQuery(GET_ROLE_MANAGEMENT_ACCOUNTS); + const [search, setSearch] = useState(""); + const searchDebounced = useDebounce(search, 300); + + const { + loading, + networkStatus, + data: latestData, + previousData, + 
error, + refetch, + } = useQuery(GET_ROLE_MANAGEMENT_ACCOUNTS, { + variables: { search: searchDebounced }, + notifyOnNetworkStatusChange: true, + }); + const data = latestData || previousData; const schemaKindName = useAtomValue(schemaKindNameState); const { schema } = useSchema(ACCOUNT_GENERIC_OBJECT); @@ -79,7 +96,11 @@ function Accounts() { }, member_of_groups: { value: { edges: edge?.node?.member_of_groups?.edges }, - display: {edge?.node?.member_of_groups?.count}, + display: ( + edge?.node?.display_label)} + /> + ), }, __typename: edge?.node?.__typename, }, @@ -95,7 +116,7 @@ function Accounts() { return ; } - if (loading) { + if (networkStatus === NetworkStatus.loading) { return ; } @@ -111,19 +132,24 @@ function Accounts() { return ( <>
-
-
{/* Search input + filter button */}
- -
- -
+
+ setSearch(e.target.value)} + placeholder="Search accounts" + className="border-none focus-visible:ring-0" + containerClassName="flex-grow" + /> + +
setShowDrawer(value)} + onClose={() => setRowToUpdate(null)} > setShowDrawer(false)} + onCancel={() => { + setRowToUpdate(null); + setShowDrawer(false); + }} onSuccess={() => { setShowDrawer(false); globalRefetch(); diff --git a/frontend/app/src/screens/role-management/constants.ts b/frontend/app/src/screens/role-management/constants.ts new file mode 100644 index 0000000000..d7978c9fd4 --- /dev/null +++ b/frontend/app/src/screens/role-management/constants.ts @@ -0,0 +1,29 @@ +export const objectDecisionOptions = [ + { + value: 1, + label: "Deny everywhere", + }, + { + value: 2, + label: "Allow on default branch", + }, + { + value: 4, + label: "Allow on other branches", + }, + { + value: 6, + label: "Allow in all branches", + }, +]; + +export const globalDecisionOptions = [ + { + value: 1, + label: "Deny", + }, + { + value: 6, + label: "Allow", + }, +]; diff --git a/frontend/app/src/screens/role-management/global-permissions-form.tsx b/frontend/app/src/screens/role-management/global-permissions-form.tsx index a6a747eaed..682d3ca941 100644 --- a/frontend/app/src/screens/role-management/global-permissions-form.tsx +++ b/frontend/app/src/screens/role-management/global-permissions-form.tsx @@ -19,11 +19,11 @@ import { FieldValues, useForm } from "react-hook-form"; import { toast } from "react-toastify"; import DropdownField from "@/components/form/fields/dropdown.field"; -import InputField from "@/components/form/fields/input.field"; import RelationshipField from "@/components/form/fields/relationship.field"; import { getRelationshipDefaultValue } from "@/components/form/utils/getRelationshipDefaultValue"; import { isRequired } from "@/components/form/utils/validation"; import { useSchema } from "@/hooks/useSchema"; +import { globalDecisionOptions } from "./constants"; interface NumberPoolFormProps extends Pick { currentObject?: Record; @@ -46,7 +46,6 @@ export const GlobalPermissionForm = ({ }); const defaultValues = { - name: getCurrentFieldValue("name", 
currentObject), action: getCurrentFieldValue("action", currentObject), decision: getCurrentFieldValue("decision", currentObject), roles, @@ -67,17 +66,6 @@ export const GlobalPermissionForm = ({ }; }); - const decisionOptions = [ - { - value: 1, - label: "Deny", - }, - { - value: 6, - label: "Allow", - }, - ]; - async function handleSubmit(data: Record) { try { const newObject = getCreateMutationFromFormDataOnly(data, currentObject); @@ -127,17 +115,6 @@ export const GlobalPermissionForm = ({ return (
- - attribute.name === "decision")?.description + } + items={globalDecisionOptions} rules={{ required: true, validate: { required: isRequired } }} /> diff --git a/frontend/app/src/screens/role-management/global-permissions.tsx b/frontend/app/src/screens/role-management/global-permissions.tsx index fdf3c9b6f8..6bb2141050 100644 --- a/frontend/app/src/screens/role-management/global-permissions.tsx +++ b/frontend/app/src/screens/role-management/global-permissions.tsx @@ -1,29 +1,45 @@ import { Button } from "@/components/buttons/button-primitive"; -import { Pill } from "@/components/display/pill"; import SlideOver, { SlideOverTitle } from "@/components/display/slide-over"; import ObjectForm from "@/components/form/object-form"; import ModalDeleteObject from "@/components/modals/modal-delete-object"; import { Table, tRowValue } from "@/components/table/table"; import { BadgeCopy } from "@/components/ui/badge-copy"; import { Pagination } from "@/components/ui/pagination"; +import { SearchInput } from "@/components/ui/search-input"; import { GLOBAL_PERMISSION_OBJECT } from "@/config/constants"; import graphqlClient from "@/graphql/graphqlClientApollo"; import { GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS } from "@/graphql/queries/role-management/getGlobalPermissions"; +import { useDebounce } from "@/hooks/useDebounce"; import useQuery from "@/hooks/useQuery"; import { useSchema } from "@/hooks/useSchema"; import { schemaKindNameState } from "@/state/atoms/schemaKindName.atom"; -import { Icon } from "@iconify-icon/react"; +import { NetworkStatus } from "@apollo/client"; import { useAtomValue } from "jotai"; import { useState } from "react"; import ErrorScreen from "../errors/error-screen"; import UnauthorizedScreen from "../errors/unauthorized-screen"; import LoadingScreen from "../loading-screen/loading-screen"; import { getPermission } from "../permission/utils"; +import { globalDecisionOptions } from "./constants"; +import { RelationshipDisplay } from 
"./relationship-display"; function GlobalPermissions() { const schemaKindName = useAtomValue(schemaKindNameState); const { schema } = useSchema(GLOBAL_PERMISSION_OBJECT); - const { loading, data, error, refetch } = useQuery(GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS); + const [search, setSearch] = useState(""); + const searchDebounced = useDebounce(search, 300); + const { + loading, + networkStatus, + data: latestData, + previousData, + error, + refetch, + } = useQuery(GET_ROLE_MANAGEMENT_GLOBAL_PERMISSIONS, { + variables: { search: searchDebounced }, + notifyOnNetworkStatusChange: true, + }); + const data = latestData || previousData; const [rowToDelete, setRowToDelete] = useState - - - - - {edge?.node?.display_label} -
- ), - value: edge?.node?.name?.value, - }, action: { value: edge?.node?.action?.value }, - decision: { value: edge?.node?.decision?.value }, + decision: { + display: globalDecisionOptions.find( + (decision) => decision.value === edge?.node?.decision?.value + )?.label, + value: edge?.node?.decision?.value, + }, roles: { - display: {edge?.node?.roles?.count}, + display: ( + edge?.node?.display_label)} + /> + ), value: { edges: edge?.node?.roles?.edges }, }, identifier: { display: }, @@ -100,7 +113,7 @@ function GlobalPermissions() { return ; } - if (loading) { + if (networkStatus === NetworkStatus.loading) { return ; } @@ -111,18 +124,23 @@ function GlobalPermissions() { return ( <>
-
-
{/* Search input + filter button */}
- -
- -
+
+ setSearch(e.target.value)} + placeholder="Search global permissions" + className="border-none focus-visible:ring-0" + containerClassName="flex-grow" + /> + +
setShowDrawer(value)} + onClose={() => setRowToUpdate(null)} > setShowDrawer(false)} + onCancel={() => { + setRowToUpdate(null); + setShowDrawer(false); + }} onSuccess={() => { setShowDrawer(false); globalRefetch(); diff --git a/frontend/app/src/screens/role-management/group-member.tsx b/frontend/app/src/screens/role-management/group-member.tsx index 2be6b8dffd..79968a9660 100644 --- a/frontend/app/src/screens/role-management/group-member.tsx +++ b/frontend/app/src/screens/role-management/group-member.tsx @@ -1,4 +1,6 @@ +import { Button } from "@/components/buttons/button-primitive"; import { Avatar } from "@/components/display/avatar"; +import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover"; import { Tooltip } from "@/components/ui/tooltip"; interface GroupMembersProps { @@ -7,8 +9,7 @@ interface GroupMembersProps { export function GroupMembers({ members }: GroupMembersProps) { const trimedMembers = members.slice(0, 5); - - const lengthDiff = members.length - trimedMembers.length; + const remainingItems = members.slice(5); return (
@@ -25,8 +26,22 @@ export function GroupMembers({ members }: GroupMembersProps) { ))}
- {!!lengthDiff && ( - + {!!remainingItems?.length && ( + + + + + + +
+ {remainingItems.map((item, index) => ( + + + + ))} +
+
+
)} ); diff --git a/frontend/app/src/screens/role-management/groups.tsx b/frontend/app/src/screens/role-management/groups.tsx index 479568365d..a4280a49f3 100644 --- a/frontend/app/src/screens/role-management/groups.tsx +++ b/frontend/app/src/screens/role-management/groups.tsx @@ -1,16 +1,18 @@ import { Button } from "@/components/buttons/button-primitive"; -import { Pill } from "@/components/display/pill"; import SlideOver, { SlideOverTitle } from "@/components/display/slide-over"; import ObjectForm from "@/components/form/object-form"; import ModalDeleteObject from "@/components/modals/modal-delete-object"; import { Table, tRowValue } from "@/components/table/table"; import { Pagination } from "@/components/ui/pagination"; +import { SearchInput } from "@/components/ui/search-input"; import { ACCOUNT_GROUP_OBJECT } from "@/config/constants"; import graphqlClient from "@/graphql/graphqlClientApollo"; import { GET_ROLE_MANAGEMENT_GROUPS } from "@/graphql/queries/role-management/getGroups"; +import { useDebounce } from "@/hooks/useDebounce"; import useQuery from "@/hooks/useQuery"; import { useSchema } from "@/hooks/useSchema"; import { schemaKindNameState } from "@/state/atoms/schemaKindName.atom"; +import { NetworkStatus } from "@apollo/client"; import { useAtomValue } from "jotai"; import { useState } from "react"; import ErrorScreen from "../errors/error-screen"; @@ -18,9 +20,23 @@ import UnauthorizedScreen from "../errors/unauthorized-screen"; import LoadingScreen from "../loading-screen/loading-screen"; import { getPermission } from "../permission/utils"; import { GroupMembers } from "./group-member"; +import { RelationshipDisplay } from "./relationship-display"; function Groups() { - const { loading, data, error, refetch } = useQuery(GET_ROLE_MANAGEMENT_GROUPS); + const [search, setSearch] = useState(""); + const searchDebounced = useDebounce(search, 300); + const { + loading, + networkStatus, + data: latestData, + previousData, + error, + refetch, + } = 
useQuery(GET_ROLE_MANAGEMENT_GROUPS, { + variables: { search: searchDebounced }, + notifyOnNetworkStatusChange: true, + }); + const data = latestData || previousData; const schemaKindName = useAtomValue(schemaKindNameState); const { schema } = useSchema(ACCOUNT_GROUP_OBJECT); const [rowToDelete, setRowToDelete] = useState{edge?.node?.roles?.count}, + display: ( + edge?.node?.display_label)} + /> + ), }, __typename: edge?.node?.__typename, }, @@ -93,7 +113,7 @@ function Groups() { return ; } - if (loading) { + if (networkStatus === NetworkStatus.loading) { return ; } @@ -109,18 +129,23 @@ function Groups() { return ( <>
-
-
{/* Search input + filter button */}
- -
- -
+
+ setSearch(e.target.value)} + placeholder="Search groups" + className="border-none focus-visible:ring-0" + containerClassName="flex-grow" + /> + +
setShowDrawer(value)} + onClose={() => setRowToUpdate(null)} > setShowDrawer(false)} + onCancel={() => { + setRowToUpdate(null); + setShowDrawer(false); + }} onSuccess={() => { setShowDrawer(false); globalRefetch(); diff --git a/frontend/app/src/screens/role-management/index.tsx b/frontend/app/src/screens/role-management/index.tsx index 4a4f9cbdef..766d799ce6 100644 --- a/frontend/app/src/screens/role-management/index.tsx +++ b/frontend/app/src/screens/role-management/index.tsx @@ -78,9 +78,5 @@ export function RoleManagementNavigation() { }, ]; - return ( -
- -
- ); + return ; } diff --git a/frontend/app/src/screens/role-management/object-permissions-form.tsx b/frontend/app/src/screens/role-management/object-permissions-form.tsx index 6b400cdcd8..9d35ef97a1 100644 --- a/frontend/app/src/screens/role-management/object-permissions-form.tsx +++ b/frontend/app/src/screens/role-management/object-permissions-form.tsx @@ -24,6 +24,9 @@ import DropdownField from "@/components/form/fields/dropdown.field"; import RelationshipField from "@/components/form/fields/relationship.field"; import { getRelationshipDefaultValue } from "@/components/form/utils/getRelationshipDefaultValue"; import { isRequired } from "@/components/form/utils/validation"; +import { useSchema } from "@/hooks/useSchema"; +import { useEffect } from "react"; +import { objectDecisionOptions } from "./constants"; interface NumberPoolFormProps extends Pick { currentObject?: Record; @@ -37,6 +40,7 @@ export const ObjectPermissionForm = ({ onCancel, onUpdateComplete, }: NumberPoolFormProps) => { + const { schema } = useSchema(OBJECT_PERMISSION_OBJECT); const branch = useAtomValue(currentBranchAtom); const date = useAtomValue(datetimeAtom); @@ -79,25 +83,6 @@ export const ObjectPermissionForm = ({ }, ]; - const decisionOptions = [ - { - value: 1, - label: "Deny", - }, - { - value: 2, - label: "Allow Default", - }, - { - value: 4, - label: "Allow Other", - }, - { - value: 6, - label: "Allow All", - }, - ]; - async function handleSubmit(data: Record) { try { const newObject = getCreateMutationFromFormDataOnly(data, currentObject); @@ -159,7 +144,10 @@ export const ObjectPermissionForm = ({ attribute.name === "decision")?.description + } + items={objectDecisionOptions} rules={{ required: true, validate: { required: isRequired } }} /> @@ -194,28 +182,35 @@ const NodeSelect = () => { const form = useFormContext(); const selectedNamespaceField: FormAttributeValue = form.watch("namespace"); + const selectedNameField: FormAttributeValue = form.watch("name"); const 
namespaceOptions = [ { value: "*", label: "*", }, - ...namespaces - .filter((namespace) => { - return namespace.name !== "Internal" && namespace.name !== "Lineage"; - }) - .map((namespace) => { - return { - value: namespace.name, - label: namespace.name, - }; - }), + ...namespaces.map((namespace) => { + return { + value: namespace.name, + label: namespace.name, + }; + }), ]; const selectedNamespace = selectedNamespaceField?.value === "*" ? { value: "*", name: "*" } - : namespaces.find((namespace) => namespace.name === selectedNamespaceField?.value); + : namespaces + .filter((namespace) => { + if (!selectedNameField?.value) { + return true; + } + + return namespace.used_by?.includes(selectedNameField?.value); + }) + .find((namespace) => { + return namespace.name === selectedNamespaceField?.value; + }); const nameOptions = [ { @@ -231,9 +226,24 @@ const NodeSelect = () => { .map((node) => ({ value: node.name, label: node.label, + badge: node.namespace, })), ]; + useEffect(() => { + // Break if namespace already set + if (selectedNamespaceField?.value) return; + + // Break if no name is provided + if (!selectedNameField?.value) return; + + // Get current node from form field value + const currentNode = nodes.find((node) => node.name === selectedNameField?.value); + if (!currentNode) return; + + form.setValue("namespace", { value: currentNode.namespace, label: currentNode.namespace }); + }, [selectedNameField?.value]); + return ( <> = { allow: ( @@ -34,7 +39,20 @@ const icons: Record = { }; function Permissions() { - const { loading, data, error, refetch } = useQuery(GET_ROLE_MANAGEMENT_OBJECT_PERMISSIONS); + const [search, setSearch] = useState(""); + const searchDebounced = useDebounce(search, 300); + const { + loading, + networkStatus, + data: latestData, + previousData, + error, + refetch, + } = useQuery(GET_ROLE_MANAGEMENT_OBJECT_PERMISSIONS, { + variables: { search: searchDebounced }, + notifyOnNetworkStatusChange: true, + }); + const data = latestData || 
previousData; const schemaKindName = useAtomValue(schemaKindNameState); const { schema } = useSchema(OBJECT_PERMISSION_OBJECT); const [rowToDelete, setRowToDelete] = useState decision.value === edge?.node?.decision?.value + )?.label, value: edge?.node?.decision?.value, }, roles: { value: { edges: edge?.node?.roles?.edges }, - display: {edge?.node?.roles?.count}, + display: ( + edge?.node?.display_label)} + /> + ), }, identifier: { value: edge?.node?.identifier?.value, @@ -133,7 +154,7 @@ function Permissions() { return ; } - if (loading) { + if (networkStatus === NetworkStatus.loading) { return ; } @@ -149,18 +170,23 @@ function Permissions() { return ( <>
-
-
{/* Search input + filter button */}
- -
- -
+
+ setSearch(e.target.value)} + placeholder="Search object permissions" + className="border-none focus-visible:ring-0" + containerClassName="flex-grow" + /> + +
setShowDrawer(value)} + onClose={() => setRowToUpdate(null)} > setShowDrawer(false)} + onCancel={() => { + setRowToUpdate(null); + setShowDrawer(false); + }} onSuccess={() => { setShowDrawer(false); globalRefetch(); diff --git a/frontend/app/src/screens/role-management/relationship-display.tsx b/frontend/app/src/screens/role-management/relationship-display.tsx new file mode 100644 index 0000000000..2723303906 --- /dev/null +++ b/frontend/app/src/screens/role-management/relationship-display.tsx @@ -0,0 +1,38 @@ +import { Button } from "@/components/buttons/button-primitive"; +import { Badge } from "@/components/ui/badge"; +import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover"; + +interface RelationshipDisplayProps { + items: Array; +} + +export function RelationshipDisplay({ items }: RelationshipDisplayProps) { + const trimedItems = items.slice(0, 3); + const remainingItems = items.slice(3); + + return ( +
+
+ {trimedItems.map((item, index) => ( + {item} + ))} +
+ + {!!remainingItems?.length && ( + + + + + + +
+ {remainingItems.map((item, index) => ( + {item} + ))} +
+
+
+ )} +
+ ); +} diff --git a/frontend/app/src/screens/role-management/roles.tsx b/frontend/app/src/screens/role-management/roles.tsx index c09c63737f..159f065ebd 100644 --- a/frontend/app/src/screens/role-management/roles.tsx +++ b/frontend/app/src/screens/role-management/roles.tsx @@ -1,4 +1,3 @@ -import { Pill } from "@/components/display/pill"; import ModalDeleteObject from "@/components/modals/modal-delete-object"; import { Table, tRowValue } from "@/components/table/table"; import { Pagination } from "@/components/ui/pagination"; @@ -13,14 +12,31 @@ import LoadingScreen from "../loading-screen/loading-screen"; import { Button } from "@/components/buttons/button-primitive"; import SlideOver, { SlideOverTitle } from "@/components/display/slide-over"; import ObjectForm from "@/components/form/object-form"; +import { SearchInput } from "@/components/ui/search-input"; import graphqlClient from "@/graphql/graphqlClientApollo"; +import { useDebounce } from "@/hooks/useDebounce"; import useQuery from "@/hooks/useQuery"; import { useSchema } from "@/hooks/useSchema"; +import { NetworkStatus } from "@apollo/client"; import UnauthorizedScreen from "../errors/unauthorized-screen"; import { getPermission } from "../permission/utils"; +import { RelationshipDisplay } from "./relationship-display"; function Roles() { - const { loading, data, error, refetch } = useQuery(GET_ROLE_MANAGEMENT_ROLES); + const [search, setSearch] = useState(""); + const searchDebounced = useDebounce(search, 300); + const { + loading, + networkStatus, + data: latestData, + previousData, + error, + refetch, + } = useQuery(GET_ROLE_MANAGEMENT_ROLES, { + variables: { search: searchDebounced }, + notifyOnNetworkStatusChange: true, + }); + const data = latestData || previousData; const schemaKindName = useAtomValue(schemaKindNameState); const { schema } = useSchema(ACCOUNT_ROLE_OBJECT); const [rowToDelete, setRowToDelete] = useState{edge?.node?.groups?.count}, + display: ( + edge?.node?.display_label)} + /> + ), 
}, permissions: { value: { edges: edge?.node?.permissions?.edges }, - display: {edge?.node?.permissions?.count}, + display: ( + edge?.node?.identifier?.value)} + /> + ), }, - __typename: { value: edge?.node?.__typename }, + __typename: edge?.node?.__typename, }, })); @@ -79,7 +103,7 @@ function Roles() { return ; } - if (loading) { + if (networkStatus === NetworkStatus.loading) { return ; } @@ -95,18 +119,23 @@ function Roles() { return ( <>
-
-
{/* Search input + filter button */}
- -
- -
+
+ setSearch(e.target.value)} + placeholder="Search roles" + className="border-none focus-visible:ring-0" + containerClassName="flex-grow" + /> + +
setShowDrawer(value)} + onClose={() => setRowToUpdate(null)} > setShowDrawer(false)} + onCancel={() => { + setRowToUpdate(null); + setShowDrawer(false); + }} onSuccess={() => { setShowDrawer(false); globalRefetch(); diff --git a/frontend/app/src/screens/schema/schema-help-menu.tsx b/frontend/app/src/screens/schema/schema-help-menu.tsx index 91532d6642..296691d950 100644 --- a/frontend/app/src/screens/schema/schema-help-menu.tsx +++ b/frontend/app/src/screens/schema/schema-help-menu.tsx @@ -6,10 +6,10 @@ import { DropdownMenuTrigger, } from "@/components/ui/dropdown-menu"; import { INFRAHUB_DOC_LOCAL } from "@/config/config"; -import { IModelSchema, menuFlatAtom } from "@/state/atoms/schema.atom"; -import { constructPath } from "@/utils/fetch"; +import { MENU_EXCLUDELIST } from "@/config/constants"; +import { IModelSchema } from "@/state/atoms/schema.atom"; +import { getObjectDetailsUrl2 } from "@/utils/objects"; import { Icon } from "@iconify-icon/react"; -import { useAtomValue } from "jotai/index"; import { Link } from "react-router-dom"; type SchemaHelpMenuProps = { @@ -17,14 +17,12 @@ type SchemaHelpMenuProps = { }; export const SchemaHelpMenu = ({ schema }: SchemaHelpMenuProps) => { - const menuItems = useAtomValue(menuFlatAtom); - const schemaInMenu = menuItems.find(({ label }) => label === schema.label); + const isListViewDisabled = MENU_EXCLUDELIST.includes(schema.kind as string); const documentationUrl = schema.documentation ? `${INFRAHUB_DOC_LOCAL}${schema.documentation}` : INFRAHUB_DOC_LOCAL; - const objectListUrl = schemaInMenu ? 
constructPath(schemaInMenu.path) : ""; return ( @@ -42,8 +40,8 @@ export const SchemaHelpMenu = ({ schema }: SchemaHelpMenuProps) => { - - + + Open list view diff --git a/frontend/app/src/screens/schema/schema-page-header.tsx b/frontend/app/src/screens/schema/schema-page-header.tsx deleted file mode 100644 index 4273e1971f..0000000000 --- a/frontend/app/src/screens/schema/schema-page-header.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import { ReactElement } from "react"; - -type SchemaPageHeaderProps = { - title: ReactElement | string; - description?: string; -}; -export const SchemaPageHeader = ({ title, description }: SchemaPageHeaderProps) => { - return ( -
-

{title}

- {description &&

{description}

} -
- ); -}; diff --git a/frontend/app/src/screens/schema/schema-selector.tsx b/frontend/app/src/screens/schema/schema-selector.tsx index bfcd0fd2e7..2ea765d3c3 100644 --- a/frontend/app/src/screens/schema/schema-selector.tsx +++ b/frontend/app/src/screens/schema/schema-selector.tsx @@ -1,12 +1,13 @@ import Accordion from "@/components/display/accordion"; import { Badge } from "@/components/ui/badge"; +import { SearchInput } from "@/components/ui/search-input"; import { QSP } from "@/config/qsp"; import { IModelSchema, genericsState, profilesAtom, schemaState } from "@/state/atoms/schema.atom"; import { classNames, isGeneric } from "@/utils/common"; import { Icon } from "@iconify-icon/react"; import { useAtomValue } from "jotai"; import * as R from "ramda"; -import { useEffect, useRef } from "react"; +import { useEffect, useRef, useState } from "react"; import { ArrayParam, useQueryParam } from "use-query-params"; type SchemaSelectorProps = { @@ -17,6 +18,7 @@ export const SchemaSelector = ({ className = "" }: SchemaSelectorProps) => { const nodes = useAtomValue(schemaState); const generics = useAtomValue(genericsState); const profiles = useAtomValue(profilesAtom); + const [search, setSearch] = useState(""); const ref = useRef(null); useEffect(() => { @@ -25,7 +27,10 @@ export const SchemaSelector = ({ className = "" }: SchemaSelectorProps) => { ref.current.scrollIntoView({ behavior: "smooth", block: "nearest" }); }, [selectedKind?.length]); - const schemas: IModelSchema[] = [...nodes, ...generics, ...profiles]; + const schemas: IModelSchema[] = [...nodes, ...generics, ...profiles].filter(({ kind }) => + kind?.toLowerCase().includes(search.toLowerCase()) + ); + const schemasPerNamespace = R.pipe( R.sortBy(R.prop("name")), R.groupBy(R.prop("namespace")) @@ -33,6 +38,13 @@ export const SchemaSelector = ({ className = "" }: SchemaSelectorProps) => { return (
+ setSearch(e.target.value)} + /> + {Object.entries(schemasPerNamespace).map(([namespace, schemas]) => { return ( diff --git a/frontend/app/src/screens/schema/types.ts b/frontend/app/src/screens/schema/types.ts index e2e4188341..39bd110fad 100644 --- a/frontend/app/src/screens/schema/types.ts +++ b/frontend/app/src/screens/schema/types.ts @@ -1,5 +1,5 @@ import { components } from "@/infraops"; -export type RelationshipSchema = components["schemas"]["RelationshipSchema-Output"]; +export type RelationshipSchema = components["schemas"]["RelationshipSchema"]; export type AttributeSchema = components["schemas"]["AttributeSchema-Output"]; diff --git a/frontend/app/src/screens/schema/utils.ts b/frontend/app/src/screens/schema/utils.ts new file mode 100644 index 0000000000..0008d811b1 --- /dev/null +++ b/frontend/app/src/screens/schema/utils.ts @@ -0,0 +1,8 @@ +import { IModelSchema } from "@/state/atoms/schema.atom"; +import { isGeneric } from "@/utils/common"; + +export const isOfKind = (kind: string, schema: IModelSchema) => { + if (schema.kind === kind) return true; + if (!isGeneric(schema) && schema.inherit_from?.includes(kind)) return true; + return false; +}; diff --git a/frontend/app/src/screens/user-profile/token-schema.ts b/frontend/app/src/screens/user-profile/token-schema.ts index 3439dd2697..17000d6763 100644 --- a/frontend/app/src/screens/user-profile/token-schema.ts +++ b/frontend/app/src/screens/user-profile/token-schema.ts @@ -600,63 +600,3 @@ export const tokenSchema = { kind: "InfrahubAccountToken", hash: "2cfd9af9d6fcac49527fca295de251b8", }; - -export const apiSchema = `{ - "version": "string", - "schemas": [ - { - "version": "string", - "nodes": [ - { - "state": "present", - "name": "AccountToken", - "namespace": "Test", - "description": "A User Token used for API access.", - "label": "Account Token", - "branch": "aware", - "display_labels": [ - "name__value" - ], - "include_in_menu": true, - "attributes": [ - { - "name": "name", - "kind": "Text", - 
"description": "Name of the user token." - }, - { - "name": "api_key", - "kind": "Text", - "description": "API key associated with the user token." - }, - { - "name": "status", - "kind": "Dropdown", - "choices": [ - { - "name": "active", - "label": "Active", - "description": "Token is active and usable.", - "color": "#009933" - }, - { - "name": "inactive", - "label": "Inactive", - "description": "Token is inactive and not usable.", - "color": "#cc0000" - } - ], - "description": "Status of the user token." - }, - { - "name": "timestamp", - "kind": "DateTime", - "description": "Timestamp of when the token was created or last used." - } - ], - "relationships": [] - } - ] - } - ] -}`; diff --git a/frontend/app/src/state/atoms/filters.atom.ts b/frontend/app/src/state/atoms/filters.atom.ts deleted file mode 100644 index 8bab260c57..0000000000 --- a/frontend/app/src/state/atoms/filters.atom.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { atom } from "jotai"; - -// Need to save the data type of the fiter value. 
Could be string | number | boolean -export interface iComboBoxFilter { - name: string; - value: string; - display_label?: string; -} - -export const comboxBoxFilterState = atom([]); diff --git a/frontend/app/src/state/atoms/schema.atom.ts b/frontend/app/src/state/atoms/schema.atom.ts index 6c2770bbb8..0f5aabc5b3 100644 --- a/frontend/app/src/state/atoms/schema.atom.ts +++ b/frontend/app/src/state/atoms/schema.atom.ts @@ -11,7 +11,7 @@ export const genericsState = atom([]); export type IProfileSchema = components["schemas"]["APIProfileSchema"]; export const profilesAtom = atom([]); -export type IModelSchema = iGenericSchema | iNodeSchema; +export type IModelSchema = iGenericSchema | iNodeSchema | IProfileSchema; export type iNamespace = { name: string; diff --git a/frontend/app/src/utils/checks.ts b/frontend/app/src/utils/checks.ts index 7c340dda70..c6206688d5 100644 --- a/frontend/app/src/utils/checks.ts +++ b/frontend/app/src/utils/checks.ts @@ -1,10 +1,4 @@ -import { - CHECKS_LABEL, - CHECK_CONCLUSIONS, - CHECK_SEVERITY, - VALIDATION_CONCLUSIONS, - VALIDATION_STATES, -} from "@/config/constants"; +import { CHECKS_LABEL, VALIDATION_CONCLUSIONS, VALIDATION_STATES } from "@/config/constants"; export const getValidatorsStats = (validators: any[]) => { const successValidators = validators.filter( @@ -70,49 +64,3 @@ export const getValidatorsStats = (validators: any[]) => { }, ].filter(Boolean); }; - -export const getChecksStats = (checks: any[]) => { - const inProgressChecks = checks.filter( - (validator: any) => validator.conclusion.value === CHECK_CONCLUSIONS.UNKNOWN - ); - - const successChecks = checks.filter( - (validator: any) => - validator.severity.value === CHECK_SEVERITY.SUCCESS && - validator.conclusion.value === CHECK_CONCLUSIONS.SUCCESS - ); - - const infoChecks = checks.filter( - (validator: any) => - validator.severity.value === CHECK_SEVERITY.INFO && - validator.conclusion.value === CHECK_CONCLUSIONS.SUCCESS - ); - - const warningChecks = 
checks.filter( - (validator: any) => - validator.severity.value === CHECK_SEVERITY.WARNING && - validator.conclusion.value === CHECK_CONCLUSIONS.FAILURE - ); - - const errorChecks = checks.filter( - (validator: any) => - validator.severity.value === CHECK_SEVERITY.ERROR && - validator.conclusion.value === CHECK_CONCLUSIONS.FAILURE - ); - - const criticalChecks = checks.filter( - (validator: any) => - validator.severity.value === CHECK_SEVERITY.CRITICAL && - validator.conclusion.value === CHECK_CONCLUSIONS.FAILURE - ); - - return { - total: checks.length, - success: successChecks.length, - info: infoChecks.length, - warning: warningChecks.length, - error: errorChecks.length, - critical: criticalChecks.length, - inProgress: inProgressChecks.length, - }; -}; diff --git a/frontend/app/src/utils/common.ts b/frontend/app/src/utils/common.ts index ddfef14451..1b61ef1dba 100644 --- a/frontend/app/src/utils/common.ts +++ b/frontend/app/src/utils/common.ts @@ -43,24 +43,24 @@ export const encodeJwt = (data: any): string => { return `.${btoa(JSON.stringify(data))}`; }; -const DEFAULT_DEBOUNCE = 1000; - -export const debounce = (func: Function, wait = DEFAULT_DEBOUNCE, immediate?: boolean) => { - let timeout: any; - return function executedFunction(this: any) { - const context = this; - // eslint-disable-next-line prefer-rest-params - const args = arguments; - const later = () => { - timeout = null; - if (!immediate) func.apply(context, args); - }; - const callNow = immediate && !timeout; - clearTimeout(timeout); - timeout = setTimeout(later, wait); - if (callNow) func.apply(context, args); +const DEFAULT_DEBOUNCE = 300; + +export function debounce any>( + func: T, + delay: number = DEFAULT_DEBOUNCE +): (...args: Parameters) => void { + let timeoutId: ReturnType | null = null; + + return function (this: ThisParameterType, ...args: Parameters): void { + if (timeoutId) { + clearTimeout(timeoutId); + } + + timeoutId = setTimeout(() => { + func.apply(this, args); + }, delay); }; 
-}; +} // https://fontawesomeicons.com/fa/react-js-change-text-color-based-on-brightness-background const calculateBrightness = (color: string) => { diff --git a/frontend/app/src/utils/fetch.ts b/frontend/app/src/utils/fetch.ts index 2c6bc2b994..c841abfc43 100644 --- a/frontend/app/src/utils/fetch.ts +++ b/frontend/app/src/utils/fetch.ts @@ -92,29 +92,6 @@ export const constructPath = ( export const getCurrentQsp = () => new URL(window.location.href).searchParams; -// Update a QSP in the URL (add, update or remove it) -export const updateQsp = (qsp: string, newValue: string, setSearchParams: Function) => { - const { href } = window.location; - - const url = new URL(href); - - const { searchParams } = url; - - // Get QSP as [ [ key, value ], ... ] - const params = [...Array.from(searchParams), [qsp, newValue]]; - - // Construct the new params as { [name]: value } - const newParams = params.reduce( - (acc, [k, v]) => ({ - ...acc, - [k]: v, - }), - {} - ); - - return setSearchParams(newParams); -}; - export const getUrlWithQsp = (url: string, options: any[]) => { const qsp = new URLSearchParams(options); diff --git a/frontend/app/src/utils/formStructureForFilters.ts b/frontend/app/src/utils/formStructureForFilters.ts deleted file mode 100644 index 07b4c0887c..0000000000 --- a/frontend/app/src/utils/formStructureForFilters.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { DynamicFieldData } from "@/screens/edit-form-hook/dynamic-control-types"; -import { iComboBoxFilter } from "@/state/atoms/filters.atom"; - -const getFormStructureForFilters = ( - schema: any, - currentFilters: any, - peerDropdownOptions: any -): DynamicFieldData[] => { - return schema.filters - ?.map((filter: any) => { - const currentValue = currentFilters?.find((f: iComboBoxFilter) => f.name === filter.name); - - if (filter.kind === "Number") { - return { - label: filter.name, - name: filter.name, - type: "number", - value: currentValue ?? 
"", - }; - } - - if (filter.kind === "Text" && !filter.enum) { - return { - label: filter.name, - name: filter.name, - type: "text", - value: currentValue ?? "", - }; - } - - if (filter.kind === "Text" && filter.enum) { - return { - label: filter.name, - name: filter.name, - type: "select", - value: currentValue ?? "", - options: filter.enum?.map((row: any) => ({ - name: row, - id: row, - })), - }; - } - - if (filter.kind === "Object") { - if (filter.object_kind && peerDropdownOptions && peerDropdownOptions[filter.object_kind]) { - const { edges } = peerDropdownOptions[filter.object_kind]; - - const options = edges.map((row: any) => ({ - name: row.node.display_label, - id: row.node.id, - })); - - return { - label: filter.name, - name: filter.name, - type: "select", - value: currentValue ? currentValue.value : "", - options, - }; - } - } - - return null; - }) - .filter(Boolean); -}; - -export default getFormStructureForFilters; diff --git a/frontend/app/src/utils/getMutationMetaDetailsFromFormData.ts b/frontend/app/src/utils/getMutationMetaDetailsFromFormData.ts index 8266fc48a7..95103fe098 100644 --- a/frontend/app/src/utils/getMutationMetaDetailsFromFormData.ts +++ b/frontend/app/src/utils/getMutationMetaDetailsFromFormData.ts @@ -1,7 +1,5 @@ import { iNodeSchema } from "@/state/atoms/schema.atom"; -export type MutationMode = "create" | "update"; - const metadataFields = ["source", "owner", "is_visible", "is_protected"]; const isValueValid = (value: any) => { diff --git a/frontend/app/src/utils/getObjectItemDisplayValue.tsx b/frontend/app/src/utils/getObjectItemDisplayValue.tsx index d1c5ff76db..e6974bc011 100644 --- a/frontend/app/src/utils/getObjectItemDisplayValue.tsx +++ b/frontend/app/src/utils/getObjectItemDisplayValue.tsx @@ -19,8 +19,8 @@ import { RelationshipProperty, TextAttribute, } from "@/generated/graphql"; -import { components } from "@/infraops"; import { SchemaAttributeType } from "@/screens/edit-form-hook/dynamic-control-types"; +import { 
AttributeSchema, RelationshipSchema } from "@/screens/schema/types"; import { iSchemaKindNameMap } from "@/state/atoms/schemaKindName.atom"; import { CheckIcon, XMarkIcon } from "@heroicons/react/24/outline"; @@ -150,9 +150,7 @@ export const getObjectItemDisplayValue = ( ); }; -export type FieldSchema = - | components["schemas"]["AttributeSchema-Output"] - | components["schemas"]["RelationshipSchema-Output"]; +export type FieldSchema = AttributeSchema | RelationshipSchema; export type AttributeType = | TextAttribute diff --git a/frontend/app/src/utils/getSchemaObjectColumns.ts b/frontend/app/src/utils/getSchemaObjectColumns.ts index 82541fd0bd..45cb30bf12 100644 --- a/frontend/app/src/utils/getSchemaObjectColumns.ts +++ b/frontend/app/src/utils/getSchemaObjectColumns.ts @@ -1,4 +1,3 @@ -import { SelectOption } from "@/components/inputs/select"; import { attributesKindForDetailsViewExclude, attributesKindForListView, @@ -185,46 +184,22 @@ export const getRelationshipOptions = (row: any, field: any, schemas: any[], gen return [option]; }; -export const getOptionsFromAttribute = (attribute: any, value: any): Array => { - if (attribute.kind === "List") { - return (value || [])?.map((option: any) => ({ - name: option, - id: option, - })); - } - - if (attribute.enum) { - return attribute.enum?.map((option: any) => ({ - name: option, - id: option, - })); - } - - if (attribute.choices) { - return attribute.choices?.map((option: any) => ({ - ...option, - name: option.label, - id: option.name, - })); - } - - return []; -}; - type tgetOptionsFromRelationship = { options: any[]; schemas?: any; generic?: any; + peerField?: string; }; export const getOptionsFromRelationship = ({ options, schemas, generic, + peerField, }: tgetOptionsFromRelationship) => { if (!generic) { return options.map((option: any) => ({ - name: option.display_label, + name: peerField ? (option[peerField]?.value ?? 
option[peerField]) : option.display_label, id: option.id, kind: option.__typename, })); diff --git a/frontend/app/src/utils/objects.ts b/frontend/app/src/utils/objects.ts index a36cfe53bf..a42e927e35 100644 --- a/frontend/app/src/utils/objects.ts +++ b/frontend/app/src/utils/objects.ts @@ -25,11 +25,11 @@ export const getObjectDetailsUrl2 = ( overrideParams?: overrideQueryParams[] ) => { if (objectKind === IP_PREFIX_GENERIC) { - return constructPathForIpam(`${IPAM_ROUTE.PREFIXES}/${objectId}`, overrideParams); + return constructPathForIpam(`${IPAM_ROUTE.PREFIXES}/${objectId ?? ""}`, overrideParams); } if (objectKind === IP_ADDRESS_GENERIC) { - return constructPathForIpam(`${IPAM_ROUTE.ADDRESSES}/${objectId}`, [ + return constructPathForIpam(`${IPAM_ROUTE.ADDRESSES}/${objectId ?? ""}`, [ { name: IPAM_QSP.TAB, value: "ip-details" }, ...(overrideParams ?? []), ]); @@ -45,18 +45,18 @@ export const getObjectDetailsUrl2 = ( const inheritFrom = schema.inherit_from; if (inheritFrom?.includes(IP_PREFIX_GENERIC)) { - return constructPathForIpam(`${IPAM_ROUTE.PREFIXES}/${objectId}`, overrideParams); + return constructPathForIpam(`${IPAM_ROUTE.PREFIXES}/${objectId ?? ""}`, overrideParams); } if (inheritFrom?.includes(IP_ADDRESS_GENERIC)) { - return constructPathForIpam(`${IPAM_ROUTE.ADDRESSES}/${objectId}`, [ + return constructPathForIpam(`${IPAM_ROUTE.ADDRESSES}/${objectId ?? ""}`, [ { name: IPAM_QSP.TAB, value: "ip-details" }, ...(overrideParams ?? []), ]); } if (inheritFrom?.includes(RESOURCE_GENERIC_KIND)) { - return constructPathForIpam(`/resource-manager/${objectId}`, overrideParams); + return constructPathForIpam(`/resource-manager/${objectId ?? 
""}`, overrideParams); } } diff --git a/frontend/app/src/utils/string.tsx b/frontend/app/src/utils/string.tsx index 417f3f10b5..0576461b03 100644 --- a/frontend/app/src/utils/string.tsx +++ b/frontend/app/src/utils/string.tsx @@ -14,15 +14,9 @@ export const stringifyWithoutQuotes = (obj: object): string => { return JSON.stringify(obj, null, 4).replace(/"([^"]+)":/g, "$1:"); }; -export const cleanTabsAndNewLines = (string: string) => { - return string.replaceAll(/\t*\n*/g, "").replaceAll(/\s+/g, " "); -}; - export const capitalizeFirstLetter = (string: string) => { return string.charAt(0).toUpperCase() + string.slice(1).toLowerCase(); }; -export const concatString = (acc: string, elem: string) => `${acc}${elem}`; - export const pluralize = (count: number, word: string, suffix = "s") => `${count} ${word}${count > 1 ? suffix : ""}`; diff --git a/frontend/app/tests/constants.ts b/frontend/app/tests/constants.ts index 153680c3b2..8b80fa29d8 100644 --- a/frontend/app/tests/constants.ts +++ b/frontend/app/tests/constants.ts @@ -1,32 +1,20 @@ -type ScreenshotConfig = { - overwrite: boolean; - scale: boolean; -}; - -export const screenshotConfig: ScreenshotConfig = { - overwrite: true, - scale: true, -}; - -export const SCREENSHOT_ENV_VARIABLE = "SCREENSHOTS"; - export const ADMIN_CREDENTIALS = { username: "admin", password: "infrahub", }; export const READ_WRITE_CREDENTIALS = { - username: "Chloe O'Brian", + username: "cobrian", password: "Password123", }; export const READ_ONLY_CREDENTIALS = { - username: "Jack Bauer", + username: "jbauer", password: "Password123", }; export const ENG_TEAM_ONLY_CREDENTIALS = { - username: "Engineering Team", + username: "shernandez", password: "Password123", }; diff --git a/frontend/app/tests/e2e/branches.spec.ts b/frontend/app/tests/e2e/branches.spec.ts index bb8ca2f003..5fa24ea8fa 100644 --- a/frontend/app/tests/e2e/branches.spec.ts +++ b/frontend/app/tests/e2e/branches.spec.ts @@ -90,4 +90,12 @@ test.describe("Branches creation and 
deletion", () => { await expect(page.getByTestId("branch-list")).not.toContainText("test123"); }); }); + + test("allow to create a branch with a name that does not exists", async ({ page }) => { + await page.goto("/"); + await page.getByTestId("branch-selector-trigger").click(); + await page.getByTestId("branch-search-input").fill("quick-branch-form"); + await page.getByRole("option", { name: "Create branch quick-branch-form" }).click(); + await expect(page.getByLabel("New branch name *")).toHaveValue("quick-branch-form"); + }); }); diff --git a/frontend/app/tests/e2e/objects/artifact.spec.ts b/frontend/app/tests/e2e/objects/artifact.spec.ts index 9cb6f1baa7..497f891133 100644 --- a/frontend/app/tests/e2e/objects/artifact.spec.ts +++ b/frontend/app/tests/e2e/objects/artifact.spec.ts @@ -3,6 +3,7 @@ import { ACCOUNT_STATE_PATH } from "../../constants"; test.describe("/objects/CoreArtifact - Artifact page", () => { test.describe.configure({ mode: "serial" }); + test.use({ storageState: ACCOUNT_STATE_PATH.ADMIN }); test.beforeEach(async function ({ page }) { page.on("response", async (response) => { diff --git a/frontend/app/tests/e2e/objects/object-filters.spec.ts b/frontend/app/tests/e2e/objects/object-filters.spec.ts index 8c4eb74dfa..fa3f37ec4d 100644 --- a/frontend/app/tests/e2e/objects/object-filters.spec.ts +++ b/frontend/app/tests/e2e/objects/object-filters.spec.ts @@ -81,7 +81,9 @@ test.describe("Object filters", () => { await page.getByRole("option", { name: "atl1-core1" }).click(); await page.getByRole("button", { name: "Apply filters" }).click(); - await expect(page.getByRole("row", { name: "InfraInterfaceL3 Loopback0" })).toBeVisible(); + await expect( + page.getByRole("row", { name: "InfraInterfaceL3 atl1-core1 Loopback0" }) + ).toBeVisible(); await expect(page.getByRole("link", { name: "Connected to jfk1-edge2" })).toBeHidden(); }); diff --git a/frontend/app/tests/e2e/objects/object-metadata.spec.ts 
b/frontend/app/tests/e2e/objects/object-metadata.spec.ts index fb30acb35d..3c8385bdcf 100644 --- a/frontend/app/tests/e2e/objects/object-metadata.spec.ts +++ b/frontend/app/tests/e2e/objects/object-metadata.spec.ts @@ -38,8 +38,8 @@ test.describe("Object metadata", () => { // Select Architecture team await page.getByText("Owner Kind ?").getByLabel("Kind").first().click(); - await page.getByRole("option", { name: "Account" }).click(); - await page.getByText("Owner Kind ?").getByLabel("Account").click(); + await page.getByRole("option", { name: "Account group" }).click(); + await page.getByText("Owner Kind ?").getByLabel("Account group").click(); await page.getByRole("option", { name: "Architecture Team" }).click(); // Save @@ -61,7 +61,7 @@ test.describe("Object metadata", () => { await metadataTooltipUpdated.getByTestId("edit-metadata-button").click(); // Source should be Account + Pop-Builder - await expect(page.getByTestId("select-input").nth(0)).toHaveValue("Account"); + await expect(page.getByTestId("select-input").nth(0)).toHaveValue("Account group"); await expect(page.getByTestId("select-input").nth(1)).toHaveValue("Architecture Team"); // Is protected should be checked diff --git a/frontend/app/tests/e2e/permissions/role-management.spec.ts b/frontend/app/tests/e2e/permissions/role-management.spec.ts index 12fe2b9497..dbfcf939a1 100644 --- a/frontend/app/tests/e2e/permissions/role-management.spec.ts +++ b/frontend/app/tests/e2e/permissions/role-management.spec.ts @@ -1,7 +1,7 @@ import { expect, test } from "@playwright/test"; import { ACCOUNT_STATE_PATH } from "../../constants"; -test.describe("Role Management - Admin", () => { +test.describe("Users & Permissions - Admin", () => { test.use({ storageState: ACCOUNT_STATE_PATH.ADMIN }); test("should be allowed to add accounts", async ({ page }) => { diff --git a/frontend/app/tests/e2e/profile/profile.spec.ts b/frontend/app/tests/e2e/profile/profile.spec.ts index 65756b541e..26be89955a 100644 --- 
a/frontend/app/tests/e2e/profile/profile.spec.ts +++ b/frontend/app/tests/e2e/profile/profile.spec.ts @@ -52,7 +52,7 @@ test.describe("/profile", () => { await expect( page.getByRole("heading", { name: "Chloe O'Brian", exact: true }) ).toBeVisible(); - await expect(page.getByText("NameChloe O'Brian")).toBeVisible(); + await expect(page.getByText("LabelChloe O'Brian")).toBeVisible(); await expect(page.getByText("Roleread-write")).toBeVisible(); }); }); @@ -70,7 +70,7 @@ test.describe("/profile", () => { await test.step("display account details", async () => { await expect(page.getByRole("heading", { name: "Jack Bauer", exact: true })).toBeVisible(); - await expect(page.getByText("NameJack Bauer")).toBeVisible(); + await expect(page.getByText("LabelJack Bauer")).toBeVisible(); await expect(page.getByText("Roleread-only")).toBeVisible(); }); }); diff --git a/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts b/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts index 00d3377d8d..d9c02e4bc3 100644 --- a/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts +++ b/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts @@ -71,8 +71,8 @@ test.describe("/proposed-changes", () => { await page.getByLabel("Name *").fill(pcName); await page.getByTestId("codemirror-editor").getByRole("textbox").fill("My description"); await page.getByTestId("select-open-option-button").click(); - await page.getByRole("option", { name: "Architecture Team" }).click(); - await page.getByRole("option", { name: "Crm Synchronization" }).click(); + await page.getByRole("option", { name: "Olivia Carter" }).click(); + await page.getByRole("option", { name: "CRM Synchronization" }).click(); await page.getByTestId("select-open-option-button").click(); await page.getByRole("button", { name: "Create proposed change" }).click(); @@ -103,7 +103,7 @@ test.describe("/proposed-changes", () => { await expect(page.getByRole("heading", { name: pcNameEdit, exact: true 
})).toBeVisible(); await expect(page.getByTestId("pc-description")).toContainText("My description edit"); - await expect(page.getByText("ReviewersAT")).toBeVisible(); + await expect(page.getByText("ReviewersOC")).toBeVisible(); }); }); diff --git a/frontend/app/tests/e2e/role-management/read.spec.ts b/frontend/app/tests/e2e/role-management/read.spec.ts index 81edabecae..d916fc180b 100644 --- a/frontend/app/tests/e2e/role-management/read.spec.ts +++ b/frontend/app/tests/e2e/role-management/read.spec.ts @@ -1,41 +1,48 @@ import { expect, test } from "@playwright/test"; +import { ACCOUNT_STATE_PATH } from "../../constants"; test.describe("Role management - READ", () => { + test.use({ storageState: ACCOUNT_STATE_PATH.ADMIN }); + test("should read correctly the different views", async ({ page }) => { await test.step("access main view", async () => { await page.goto("/role-management"); }); await test.step("check counts", async () => { - await expect(page.getByRole("link", { name: "Accounts 9" })).toBeVisible(); - await expect(page.getByRole("link", { name: "Groups 2" })).toBeVisible(); - await expect(page.getByRole("link", { name: "Roles 2" })).toBeVisible(); - await expect(page.getByRole("link", { name: "Global Permissions 4" })).toBeVisible(); - await expect(page.getByRole("link", { name: "Object Permissions 2" })).toBeVisible(); + await expect(page.getByRole("link", { name: "Accounts 12" })).toBeVisible(); + await expect(page.getByRole("link", { name: "Groups 6" })).toBeVisible(); + await expect(page.getByRole("link", { name: "Roles 7" })).toBeVisible(); + await expect(page.getByRole("link", { name: "Global Permissions 8" })).toBeVisible(); + await expect(page.getByRole("link", { name: "Object Permissions 4" })).toBeVisible(); }); await test.step("check accounts view", async () => { - await expect(page.getByRole("cell", { name: "Admin" })).toBeVisible(); + await expect(page.getByRole("cell", { name: "admin", exact: true })).toBeVisible(); await 
expect(page.getByRole("cell", { name: "Pop-Builder" })).toBeVisible(); }); await test.step("check groups view", async () => { - await page.getByRole("link", { name: "Groups 2" }).click(); - await expect(page.getByRole("cell", { name: "Administrators" })).toBeVisible(); - await expect(page.getByRole("cell", { name: "+ 4" })).toBeVisible(); + await page.getByRole("link", { name: "Groups 6" }).click(); + await expect(page.getByText("Showing 1 to 6 of 6 results")).toBeVisible(); + await expect( + page.getByTestId("breadcrumb-navigation").getByRole("link", { name: "Groups" }) + ).toBeVisible(); + await expect(page.getByRole("cell", { name: "Operations Team" })).toBeVisible(); }); await test.step("check roles view", async () => { - await page.getByRole("link", { name: "Roles 2" }).click(); - await expect(page.getByRole("cell", { name: "Super Administrator" })).toBeVisible(); + await page.getByRole("link", { name: "Roles 7" }).click(); + await expect(page.getByText("General Access")).toBeVisible(); + await expect(page.getByText("Infrahub Users")).toBeVisible(); + await expect(page.getByText("global:edit_default_branch:")).toBeVisible(); await expect(page.getByRole("cell", { name: "1" }).first()).toBeVisible(); }); await test.step("check global permissions view", async () => { await page.getByRole("link", { name: "Global Permissions" }).click(); - await expect(page.getByRole("cell", { name: "Super Admin" })).toBeVisible(); - await expect(page.getByRole("cell", { name: "1" }).first()).toBeVisible(); - await expect(page.getByText("global:super_admin:allow")).toBeVisible(); + await expect(page.getByRole("cell", { name: "super_admin", exact: true })).toBeVisible(); + await expect(page.getByText("global:super_admin:")).toBeVisible(); }); }); }); diff --git a/frontend/app/tests/e2e/schema.spec.ts b/frontend/app/tests/e2e/schema.spec.ts index 6f616b2b2e..76106b52d0 100644 --- a/frontend/app/tests/e2e/schema.spec.ts +++ b/frontend/app/tests/e2e/schema.spec.ts @@ -46,4 +46,13 @@ 
test.describe("/schema - Schema visualizer", () => { await expect(page.getByRole("menuitem", { name: "Open list view" })).toBeEnabled(); }); }); + + test("filter schema list", async ({ page }) => { + await page.goto("/schema"); + await expect(page.getByRole("heading", { name: "Core Account Node" })).toBeVisible(); + + await page.getByPlaceholder("Search schema").fill("tag"); + await expect(page.getByRole("heading", { name: "Builtin Tag Node" })).toBeVisible(); + await expect(page.getByRole("heading", { name: "Core Account Node" })).not.toBeVisible(); + }); }); diff --git a/frontend/app/tests/e2e/tutorial/tutorial-2_data-lineage-and-metadata.spec.ts b/frontend/app/tests/e2e/tutorial/tutorial-2_data-lineage-and-metadata.spec.ts index d8056d2a79..25a9274248 100644 --- a/frontend/app/tests/e2e/tutorial/tutorial-2_data-lineage-and-metadata.spec.ts +++ b/frontend/app/tests/e2e/tutorial/tutorial-2_data-lineage-and-metadata.spec.ts @@ -28,7 +28,7 @@ test.describe("Getting started with Infrahub - Data lineage and metadata", () => await test.step("Update Description attribute to make it protected", async () => { await page.getByTestId("edit-metadata-button").click(); await page.getByLabel("Kind").first().click(); - await page.getByRole("option", { name: "Account" }).click(); + await page.getByRole("option", { name: "Account" }).first().click(); await page.getByLabel("Account").click(); await page.getByRole("option", { name: "Admin" }).click(); await page.getByLabel("is protected *").check(); @@ -39,10 +39,5 @@ test.describe("Getting started with Infrahub - Data lineage and metadata", () => await expect(page.getByText("Is protectedTrue")).toBeVisible(); }); - - await test.step("Not allowed to updated description because user is not admin", async () => { - await page.getByTestId("edit-button").click(); - await expect(page.getByLabel("Description")).toBeDisabled(); - }); }); }); diff --git a/frontend/app/tests/e2e/tutorial/tutorial-4_integration-with-git.spec.ts 
b/frontend/app/tests/e2e/tutorial/tutorial-4_integration-with-git.spec.ts index 208d2535d7..d90ba44fca 100644 --- a/frontend/app/tests/e2e/tutorial/tutorial-4_integration-with-git.spec.ts +++ b/frontend/app/tests/e2e/tutorial/tutorial-4_integration-with-git.spec.ts @@ -29,8 +29,7 @@ test.describe("Getting started with Infrahub - Integration with Git", () => { await test.step("go to interface Ethernet 1 for atl1-edge1", async () => { await page.getByTestId("sidebar").getByRole("button", { name: "Device Management" }).click(); - await page.getByRole("link", { name: "Device" }).click(); - await page.getByLabel("Device Management").press("Escape"); + await page.getByRole("menuitem", { name: "Device", exact: true }).click(); await expect(page.getByText("Generic Device object")).toBeVisible(); await page.getByRole("link", { name: "atl1-edge1" }).click(); diff --git a/frontend/app/tests/integrations/screens/artifact-diff.cy.tsx b/frontend/app/tests/integrations/screens/artifact-diff.cy.tsx index 1999a0195e..9eee02f5d5 100644 --- a/frontend/app/tests/integrations/screens/artifact-diff.cy.tsx +++ b/frontend/app/tests/integrations/screens/artifact-diff.cy.tsx @@ -182,9 +182,6 @@ describe("Artifact Diff", () => { before(() => { const data = { sub: profileId, - user_claims: { - role: "admin", - }, }; const token = encodeJwt(data); diff --git a/frontend/app/tests/mocks/data/account.ts b/frontend/app/tests/mocks/data/account.ts index 8ad73e2749..09f496d896 100644 --- a/frontend/app/tests/mocks/data/account.ts +++ b/frontend/app/tests/mocks/data/account.ts @@ -1,5 +1,3 @@ -export const accountId = "bfb5c658-d606-47b1-b614-d2e44e6d3e67"; - export const accountDetailsMocksSchema = [ { id: "17e3078a-8ef3-b130-2781-179f619825a0", @@ -871,178 +869,3 @@ export const accountDetailsMocksSchema = [ hash: "97aba0a736b3e673e0fdd33b32757c1c", }, ]; - -export const accountDetailsMocksQuery = `query CoreAccount { - CoreAccount (ids: ["${accountId}"] ) { - edges { - node { - id - display_label 
- profiles { - edges { - node { - display_label - id - } - } - } - name { - value - updated_at - is_from_profile - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - label { - value - updated_at - is_from_profile - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - description { - value - updated_at - is_from_profile - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - account_type { - value - updated_at - is_from_profile - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - role { - value - updated_at - is_from_profile - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - } - } - } -} -`; - -export const accountDetailsMocksData = { - CoreAccount: { - edges: [ - { - node: { - id: accountId, - display_label: "Admin", - name: { - value: "admin", - updated_at: "2023-07-03T06:51:06.645925+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - password: { - value: "$2b$12$9/3ivk9fIDWah40iXsCn1ubiwkCKNIuyOlUww1wVJ6CuQ2Q2u8wAS", - updated_at: "2023-07-03T06:51:06.645925+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - label: { - value: "Admin", - updated_at: "2023-07-03T06:51:06.645925+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - description: { - value: null, - updated_at: "2023-07-03T06:51:06.645925+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - account_type: { - value: "User", - updated_at: 
"2023-07-03T06:51:06.645925+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - role: { - value: "admin", - updated_at: "2023-07-03T06:51:06.645925+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - ], - __typename: "PaginatedCoreAccount", - }, -}; diff --git a/frontend/app/tests/mocks/data/accountToken.ts b/frontend/app/tests/mocks/data/accountToken.ts deleted file mode 100644 index bb4ca30b9f..0000000000 --- a/frontend/app/tests/mocks/data/accountToken.ts +++ /dev/null @@ -1,1316 +0,0 @@ -import { iNodeSchema } from "../../../src/state/atoms/schema.atom"; - -export const accountTokenId = "bfb5c658-d606-47b1-b614-d2e44e6d3e67"; -export const accountTokenNewDate = "2023-07-14T22:00:00.000Z"; - -export const accountTokenDetailsMocksSchema: iNodeSchema[] = [ - { - id: accountTokenId, - name: "AccountToken", - namespace: "Internal", - description: "Token for User Account", - default_filter: "token__value", - order_by: undefined, - display_labels: ["token__value"], - attributes: [ - { - id: "a860d35c-76e0-4e07-a76c-ce36948a6464", - name: "name", - kind: "Text", - namespace: "Attribute", - label: "Name", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 1000, - }, - { - id: "dc040126-b39b-4522-a147-9cbd138f4464", - name: "token", - kind: "Text", - namespace: "Attribute", - label: "Token", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: true, - branch: true, - optional: false, - order_weight: 2000, - }, - { - id: "5f156988-5e99-4dff-b3ee-30120a95d344", - name: 
"expiration", - kind: "DateTime", - namespace: "Attribute", - label: "Expiration", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 3000, - }, - ], - relationships: [ - { - id: "8ed7a95f-5b49-4df4-b901-c8cd1d9e6430", - name: "account", - peer: "CoreAccount", - kind: "Generic", - label: "Account", - description: undefined, - identifier: "coreaccount__internalaccounttoken", - inherited: false, - cardinality: "one", - branch: true, - optional: true, - filters: [ - { - name: "id", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "label__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "description__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "type__value", - kind: "Text", - enum: ["User", "Script", "Bot", "Git"], - object_kind: undefined, - description: undefined, - }, - { - name: "role__value", - kind: "Text", - enum: ["admin", "read-only", "read-write"], - object_kind: undefined, - description: undefined, - }, - ], - order_weight: 4000, - }, - { - id: "78a226e0-7670-4e6b-aac1-77aacfb406d0", - name: "member_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Member Of Groups", - description: undefined, - identifier: "group_member", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [ - { - name: "id", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "label__value", - kind: 
"Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "description__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - ], - order_weight: 5000, - }, - { - id: "ee77fcc3-d6fc-4091-a17f-739c3039795f", - name: "subscriber_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Subscriber Of Groups", - description: undefined, - identifier: "group_subscriber", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [ - { - name: "id", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "label__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "description__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - ], - order_weight: 6000, - }, - { - name: "related_nodes", - label: "Related nodes", - cardinality: "many", - peer: "TestNode2", - optional: false, - kind: "Generic", - }, - { - name: "related_nodes_2", - label: "Related nodes", - cardinality: "many", - peer: "TestNode2", - optional: false, - kind: "Component", - }, - ], - label: "Account Token", - inherit_from: [], - groups: [], - branch: true, - filters: [ - { - name: "ids", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "token__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - ], - kind: "InternalAccountToken", - }, -]; - -// Same schema but with a different name to be allowed to test it even if in the MENU_EXCLUDELIST constant -export const accountTokenDetailsMocksSchemaBIS: 
iNodeSchema[] = [ - { - id: accountTokenId, - name: "AccountTokenBis", - namespace: "Internal", - description: "Token for User Account", - default_filter: "token__value", - order_by: undefined, - display_labels: ["token__value"], - attributes: [ - { - id: "a860d35c-76e0-4e07-a76c-ce36948a6464", - name: "name", - kind: "Text", - namespace: "Attribute", - label: "Name", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 1000, - }, - { - id: "dc040126-b39b-4522-a147-9cbd138f4464", - name: "token", - kind: "Text", - namespace: "Attribute", - label: "Token", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: true, - branch: true, - optional: false, - order_weight: 2000, - }, - { - id: "5f156988-5e99-4dff-b3ee-30120a95d344", - name: "expiration", - kind: "DateTime", - namespace: "Attribute", - label: "Expiration", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 3000, - }, - ], - relationships: [ - { - id: "8ed7a95f-5b49-4df4-b901-c8cd1d9e6430", - name: "account", - peer: "CoreAccount", - kind: "Generic", - label: "Account", - description: undefined, - identifier: "coreaccount__internalaccounttoken", - inherited: false, - cardinality: "one", - branch: true, - optional: false, - filters: [ - { - name: "id", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "label__value", - kind: "Text", - enum: undefined, - object_kind: 
undefined, - description: undefined, - }, - { - name: "description__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "type__value", - kind: "Text", - enum: ["User", "Script", "Bot", "Git"], - object_kind: undefined, - description: undefined, - }, - { - name: "role__value", - kind: "Text", - enum: ["admin", "read-only", "read-write"], - object_kind: undefined, - description: undefined, - }, - ], - order_weight: 4000, - }, - { - id: "78a226e0-7670-4e6b-aac1-77aacfb406d0", - name: "member_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Member Of Groups", - description: undefined, - identifier: "group_member", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [ - { - name: "id", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "label__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "description__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - ], - order_weight: 5000, - }, - { - id: "ee77fcc3-d6fc-4091-a17f-739c3039795f", - name: "subscriber_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Subscriber Of Groups", - description: undefined, - identifier: "group_subscriber", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [ - { - name: "id", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "label__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "description__value", - kind: "Text", - enum: 
undefined, - object_kind: undefined, - description: undefined, - }, - ], - order_weight: 6000, - }, - ], - label: "Account Token", - inherit_from: [], - groups: [], - branch: true, - filters: [ - { - name: "ids", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "name__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - { - name: "token__value", - kind: "Text", - enum: undefined, - object_kind: undefined, - description: undefined, - }, - ], - kind: "InternalAccountTokenBis", - }, -]; - -export const accountTokenDetailsMocksQuery = ` -query InternalAccountToken { - InternalAccountToken (ids: ["${accountTokenId}"]) { - edges { - node { - id - display_label - - name { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - token { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - expiration { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - - - } - } - } -} -`; - -export const accountTokenDetailsMocksQueryBis = ` -query InternalAccountTokenBis { - InternalAccountTokenBis (ids: ["${accountTokenId}"]) { - edges { - node { - id - display_label - - name { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - token { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - expiration { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - account { - node { - id - display_label - __typename 
- } - properties { - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - __typename - } - } - } - } - } - InfrahubTask(related_node__ids: ["${accountTokenId}"]) { - count - } -} -`; - -export const accountTokenDetailsMocksData = { - InternalAccountToken: { - edges: [ - { - node: { - id: accountTokenId, - display_label: "06438eb2-8019-4776-878c-0941b1f1d1ec", - name: { - value: null, - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - token: { - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - expiration: { - value: "", - updated_at: "2023-07-13T06:42:11.613885+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: "InternalAccountToken", - }, - __typename: "EdgedInternalAccountToken", - }, - ], - __typename: "PaginatedInternalAccountToken", - }, -}; - -export const accountTokenDetailsMocksDataBis = { - InternalAccountTokenBis: { - edges: [ - { - node: { - id: accountTokenId, - display_label: "06438eb2-8019-4776-878c-0941b1f1d1ec", - name: { - value: null, - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - token: { - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - expiration: { - value: "", - updated_at: "2023-07-13T06:42:11.613885+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: 
"InternalAccountTokenBis", - }, - __typename: "EdgedInternalAccountToken", - }, - ], - __typename: "PaginatedInternalAccountToken", - }, -}; - -export const accountTokenDetailsMocksDataWithDate = { - InternalAccountToken: { - edges: [ - { - node: { - id: accountTokenId, - display_label: "06438eb2-8019-4776-878c-0941b1f1d1ec", - name: { - value: null, - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - token: { - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - expiration: { - value: accountTokenNewDate, - updated_at: "2023-07-13T06:42:11.613885+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: "InternalAccountToken", - }, - __typename: "EdgedInternalAccountToken", - }, - ], - __typename: "PaginatedInternalAccountToken", - }, -}; - -export const accountTokenDetailsMocksDataWithDateBis = { - InternalAccountTokenBis: { - edges: [ - { - node: { - id: accountTokenId, - display_label: "06438eb2-8019-4776-878c-0941b1f1d1ec", - name: { - value: null, - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - token: { - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - expiration: { - value: accountTokenNewDate, - updated_at: "2023-07-13T06:42:11.613885+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: "InternalAccountTokenBis", - }, - __typename: "EdgedInternalAccountToken", - }, - ], - 
__typename: "PaginatedInternalAccountToken", - }, -}; - -export const accountTokenFormStructure = [ - { - name: "name", - kind: "Text", - type: "text", - label: "Name", - value: null, - options: [], - config: {}, - isOptional: true, - isProtected: false, - isReadOnly: undefined, - isUnique: false, - }, - { - name: "token", - kind: "Text", - type: "text", - label: "Token", - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - options: [], - config: {}, - isOptional: false, - isProtected: false, - isReadOnly: undefined, - isUnique: true, - }, - { - name: "expiration", - kind: "DateTime", - type: "datepicker", - label: "Expiration", - value: "2023-07-14T22:00:00.000Z", - options: [], - config: {}, - isOptional: true, - isProtected: false, - isReadOnly: undefined, - isUnique: false, - }, - { - name: "account", - kind: "String", - peer: "CoreAccount", - type: "select", - label: "Account", - value: "", - options: [], - config: {}, - isOptional: true, - isProtected: false, - }, - { - name: "related_nodes", - kind: "String", - peer: "TestNode2", - type: "multiselect", - label: "Related nodes", - value: "", - options: [], - config: {}, - isOptional: false, - isProtected: false, - isInherited: false, - parent: false, - }, - { - name: "related_nodes_2", - kind: "String", - peer: "TestNode2", - type: "multiselect", - label: "Related nodes", - value: "", - options: [], - config: {}, - isOptional: false, - isProtected: false, - isInherited: false, - parent: false, - }, -]; - -export const accountTokenDetailsUpdateDataMocksData = { - name: "New name", - token: "06438eb2-8019-4776-878c-0941b1f1d1ec", - expiration: "2023-07-15T22:00:00.000Z", - account: { id: "95b04b43-91de-4e29-844d-5655abe696b5" }, -}; - -export const accountTokenDetailsUpdatesMocksData = { - name: { value: "New name" }, - expiration: { value: "2023-07-15T22:00:00.000Z" }, - account: { id: "95b04b43-91de-4e29-844d-5655abe696b5" }, -}; - -export const accountTokenMocksMutation = ` -mutation InternalAccountTokenUpdate 
{ - InternalAccountTokenUpdate (data: { - id: "${accountTokenId}", - name: { - value: "New name" - }, - expiration: { - value: "2023-07-15T22:00:00.000Z" - }, - account: { - id: "95b04b43-91de-4e29-844d-5655abe696b5" - } -}) { - ok - } -} -`; - -export const accountTokenMocksMutationBis = ` -mutation InternalAccountTokenBisUpdate { - InternalAccountTokenBisUpdate (data: { - id: "${accountTokenId}", - name: { - value: "New name" - }, - expiration: { - value: "2023-07-15T22:00:00.000Z" - }, - account: { - id: "95b04b43-91de-4e29-844d-5655abe696b5" - } -}) { - ok - } -} -`; - -export const accountTokenEditMocksQuery = ` -query InternalAccountToken { - InternalAccountToken (ids: ["${accountTokenId}"]) { - edges { - node { - id - display_label - name { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - token { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - expiration { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - } - } - } - CoreAccount { - edges { - node { - id - display_label - } - } - } - CoreGroup { - edges { - node { - id - display_label - } - } - } - CoreGroup { - edges { - node { - id - display_label - } - } - } -} -`; - -export const accountTokenEditMocksQueryBis = ` -query InternalAccountTokenBis { - InternalAccountTokenBis(ids: ["${accountTokenId}"]) { - edges { - node { - id - display_label - name { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - token { - value - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - expiration { - value - updated_at - 
is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - } - account { - node { - id - display_label - __typename - } - properties { - updated_at - is_protected - is_visible - source { - id - display_label - __typename - } - owner { - id - display_label - __typename - } - __typename - } - } - } - } - } -} -`; - -export const accountTokenEditMocksData = { - InternalAccountToken: { - edges: [ - { - node: { - id: accountTokenId, - display_label: "06438eb2-8019-4776-878c-0941b1f1d1ec", - name: { - value: null, - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - token: { - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - expiration: { - value: "", - updated_at: "2023-07-13T06:42:11.613885+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: "InternalAccountToken", - }, - __typename: "EdgedInternalAccountToken", - }, - ], - __typename: "PaginatedInternalAccountToken", - }, -}; - -export const accountTokenEditMocksDataBis = { - InternalAccountTokenBisDetailsAndPeers: { - edges: [ - { - node: { - id: accountTokenId, - display_label: "06438eb2-8019-4776-878c-0941b1f1d1ec", - name: { - value: null, - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - token: { - value: "06438eb2-8019-4776-878c-0941b1f1d1ec", - updated_at: "2023-07-12T15:22:03.351221+00:00", - is_protected: false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - expiration: { - value: "", - updated_at: "2023-07-13T06:42:11.613885+00:00", - is_protected: 
false, - is_visible: true, - source: null, - owner: null, - __typename: "TextAttribute", - }, - __typename: "InternalAccountTokenBis", - }, - __typename: "EdgedInternalAccountToken", - }, - ], - __typename: "PaginatedInternalAccountToken", - }, -}; - -export const accountsDropdownOptionsQuery = ` -query DropdownOptions { - CoreAccount { - count - edges { - node { - id - display_label - __typename - } - } - } -} -`; - -export const accountsDropdownOptionsData = { - CoreAccount: { - edges: [ - { - node: { - id: "c75a43b4-1df8-4d8b-894e-9fb684b62f8e", - display_label: "Architecture Team", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "4e4ac1bf-3e5c-4c42-808e-c2fdfd684512", - display_label: "Crm Synchronization", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "8540d34a-a525-4765-b62e-6ca746e15077", - display_label: "Chloe O'Brian", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "68246241-9162-4156-beee-7ba4ed4563e3", - display_label: "David Palmer", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "86cdbffb-6bb5-4fcd-808b-fd9ea020fce7", - display_label: "Engineering Team", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "65e55704-ba5b-4876-9707-afc5d049424d", - display_label: "Jack Bauer", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "f858c0ee-84aa-4f66-a003-2481ca1fd106", - display_label: "Operation Team", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "d7f866a8-6b26-4c37-bd79-9082450ca16c", - display_label: "Administrator", - __typename: "CoreAccount", - }, - __typename: "EdgedCoreAccount", - }, - { - node: { - id: "c3412415-707e-4f38-b12a-3a9814483c9f", - display_label: "Pop-Builder", - __typename: "CoreAccount", - }, - __typename: 
"EdgedCoreAccount", - }, - ], - __typename: "PaginatedCoreAccount", - }, -}; diff --git a/frontend/app/tests/mocks/data/data-changes.ts b/frontend/app/tests/mocks/data/data-changes.ts deleted file mode 100644 index 0b67073a2a..0000000000 --- a/frontend/app/tests/mocks/data/data-changes.ts +++ /dev/null @@ -1,1272 +0,0 @@ -export const objectThreadSchema = { - id: "17a53fb0-8191-7969-2d03-c51b08c9b696", - name: "ObjectThread", - namespace: "Core", - description: "A thread related to an object on a proposed change", - default_filter: null, - branch: "agnostic", - order_by: null, - display_labels: null, - attributes: [ - { - id: "17a53fb0-87c9-e1fd-2d03-c5155a15ff42", - name: "object_path", - kind: "Text", - label: "Object Path", - description: null, - default_value: null, - enum: null, - regex: null, - max_length: null, - min_length: null, - read_only: false, - inherited: false, - unique: false, - branch: "agnostic", - optional: false, - order_weight: 1000, - choices: null, - }, - { - id: "17a53fb0-88a1-592b-2d05-c5153ecc3abe", - name: "label", - kind: "Text", - label: "Label", - description: null, - default_value: null, - enum: null, - regex: null, - max_length: null, - min_length: null, - read_only: false, - inherited: true, - unique: false, - branch: "agnostic", - optional: true, - order_weight: 2000, - choices: null, - }, - { - id: "17a53fb0-89bb-c2db-2d00-c5177efd3b50", - name: "resolved", - kind: "Boolean", - label: "Resolved", - description: null, - default_value: false, - enum: null, - regex: null, - max_length: null, - min_length: null, - read_only: false, - inherited: true, - unique: false, - branch: "agnostic", - optional: true, - order_weight: 3000, - choices: null, - }, - { - id: "17a53fb0-8af7-2523-2d0c-c51f25e2d550", - name: "created_at", - kind: "DateTime", - label: "Created At", - description: null, - default_value: null, - enum: null, - regex: null, - max_length: null, - min_length: null, - read_only: false, - inherited: true, - unique: false, - 
branch: "agnostic", - optional: true, - order_weight: 4000, - choices: null, - }, - ], - relationships: [ - { - id: "17a53fb0-8bd3-600b-2d0e-c5154b38eac6", - name: "change", - peer: "CoreProposedChange", - kind: "Parent", - direction: "bidirectional", - label: "Change", - description: null, - identifier: "proposedchange__thread", - inherited: true, - cardinality: "one", - branch: "agnostic", - optional: false, - filters: [ - { name: "ids", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "source_branch__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "source_branch__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "source_branch__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "source_branch__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "source_branch__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "source_branch__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "destination_branch__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: 
"destination_branch__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "destination_branch__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "destination_branch__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "destination_branch__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "destination_branch__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "state__value", - kind: "Text", - enum: ["open", "merged", "closed", "canceled"], - object_kind: null, - description: null, - }, - { name: "state__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "state__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "state__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "state__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "state__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - order_weight: 5000, - }, - { - id: "17a53fb0-8c82-44ab-2d06-c51be086e749", - name: "comments", - peer: "CoreThreadComment", - kind: "Component", - direction: "bidirectional", - label: "Comments", - description: null, - identifier: "thread__threadcomment", - inherited: true, - cardinality: "many", - branch: "agnostic", - optional: true, - filters: [{ name: "ids", kind: "Text", enum: null, object_kind: null, description: null }], - order_weight: 6000, - }, - { - id: "17a53fb0-8cec-f6a2-2d05-c518429232f0", - name: "created_by", - peer: "CoreAccount", - kind: "Generic", - direction: "bidirectional", - label: "Created By", - description: null, - identifier: "coreaccount__corethread", - inherited: 
true, - cardinality: "one", - branch: "agnostic", - optional: true, - filters: [ - { name: "ids", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { name: "label__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "label__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "label__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - 
}, - { - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "type__value", - kind: "Text", - enum: ["User", "Script", "Bot", "Git"], - object_kind: null, - description: null, - }, - { name: "type__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "type__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "type__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "type__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "type__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "role__value", - kind: "Text", - enum: ["admin", "read-only", "read-write"], - object_kind: null, - description: null, - }, - { name: "role__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "role__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "role__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "role__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "role__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - order_weight: 7000, - }, - { - id: "17a53fb0-8d5c-751d-2d04-c517647f7a8e", - name: "member_of_groups", - peer: "CoreGroup", - kind: "Group", - direction: "bidirectional", - label: "Member Of Groups", - description: null, - identifier: "group_member", - inherited: false, - cardinality: "many", - branch: "aware", - optional: true, - filters: [ - { name: "ids", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: 
"name__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { name: "label__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "label__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "label__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - order_weight: 8000, - }, - { - id: "17a53fb0-8dc4-f2a0-2d00-c517e9f46163", - name: "subscriber_of_groups", - peer: "CoreGroup", - kind: 
"Group", - direction: "bidirectional", - label: "Subscriber Of Groups", - description: null, - identifier: "group_subscriber", - inherited: false, - cardinality: "many", - branch: "aware", - optional: true, - filters: [ - { name: "ids", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { name: "label__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "label__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "label__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - 
description: null, - }, - { - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - order_weight: 9000, - }, - ], - filters: [ - { name: "ids", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "object_path__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "object_path__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "object_path__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "object_path__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "object_path__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "object_path__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { name: "label__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "label__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "label__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "label__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { name: "label__source__id", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "label__owner__id", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "resolved__value", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { name: "resolved__values", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "resolved__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: 
null, - }, - { - name: "resolved__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "resolved__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "resolved__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { name: "any__value", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "any__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "any__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { name: "any__source__id", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "any__owner__id", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "change__ids", - kind: "Text", - enum: null, - object_kind: "CoreProposedChange", - description: null, - }, - { - name: "change__name__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__name__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__source_branch__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__source_branch__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__source_branch__is_visible", - kind: "Boolean", - enum: 
null, - object_kind: null, - description: null, - }, - { - name: "change__source_branch__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__source_branch__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__source_branch__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__destination_branch__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__destination_branch__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__destination_branch__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__destination_branch__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__destination_branch__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__destination_branch__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__state__value", - kind: "Text", - enum: ["open", "merged", "closed", "canceled"], - object_kind: null, - description: null, - }, - { - name: "change__state__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__state__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__state__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__state__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - name: "change__state__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - include_in_menu: false, - 
menu_placement: null, - icon: null, - label: "Thread - Object", - inherit_from: ["CoreThread"], - groups: [], - kind: "CoreObjectThread", - hash: "5a92f8a14bf7c4dd08da69e168d1c4b7", -}; - -export const DataDiffProposedChangesState = { - id: "17a54304-4b2a-264a-2d02-c51eae9cdde6", - display_label: "pc-data-changes", - __typename: "CoreProposedChange", - _updated_at: "2023-12-29T09:21:59.750848+00:00", - name: { - value: "pc-data-changes", - __typename: "TextAttribute", - }, - source_branch: { - value: "branch-data-changes", - __typename: "TextAttribute", - }, - destination_branch: { - value: "main", - __typename: "TextAttribute", - }, - state: { - value: "open", - __typename: "TextAttribute", - }, - approved_by: { - edges: [], - __typename: "NestedPaginatedCoreAccount", - }, - reviewers: { - edges: [], - __typename: "NestedPaginatedCoreAccount", - }, - created_by: { - node: { - id: "17a53fb1-512b-ae9e-2d0d-c51fade6639c", - display_label: "Admin", - __typename: "CoreAccount", - }, - __typename: "NestedEdgedCoreAccount", - }, -}; - -export const getAllCoreObjectThreadMockQuery = `query getThreadsAndChecksForCoreObjectThread { - CoreObjectThread( - change__ids: "1cec1fe9-fcc4-4f5b-af30-9d661de65bd8" - ) { - count - edges { - node { - __typename - id - object_path { - value - } - comments { - count - } - } - } - } -}`; - -export const getAllCoreObjectThreadMockData = { - data: { - CoreObjectThread: { - count: 1, - edges: [ - { - node: { - __typename: "CoreObjectThread", - id: "17a54a5c-55d1-0620-2d0a-c51d250954a9", - object_path: { - value: "data/17a53fbd-e27b-784b-393c-c5127a1b1be3", - __typename: "TextAttribute", - }, - comments: { - count: 1, - __typename: "NestedPaginatedCoreThreadComment", - }, - }, - __typename: "EdgedCoreObjectThread", - }, - ], - __typename: "PaginatedCoreObjectThread", - }, - }, -}; - -export const getCoreObjectThreadMockQuery = `query getProposedChangesThreadsForCoreObjectThread { - CoreObjectThread( - change__ids: 
"1cec1fe9-fcc4-4f5b-af30-9d661de65bd8" - object_path__value: "data/17a53fbd-e27b-784b-393c-c5127a1b1be3" - ) { - count - edges { - node { - __typename - id - comments { - count - } - } - } - } -}`; - -export const getCoreObjectThreadMockData = { - data: { - CoreObjectThread: { - count: 1, - edges: [ - { - node: { - __typename: "CoreObjectThread", - id: "17a54a5c-55d1-0620-2d0a-c51d250954a9", - object_path: { - value: "data/17a53fbd-e27b-784b-393c-c5127a1b1be3", - __typename: "TextAttribute", - }, - comments: { - count: 1, - __typename: "NestedPaginatedCoreThreadComment", - }, - }, - __typename: "EdgedCoreObjectThread", - }, - ], - __typename: "PaginatedCoreObjectThread", - }, - }, -}; - -export const getCoreObjectWithoutThreadMockData = { - data: { - CoreObjectThread: { - count: 0, - edges: [], - __typename: "PaginatedCoreObjectThread", - }, - }, -}; - -export const getProposedChangesCommentsMockQuery = `query getProposedChangesObjectThreadCommentsForCoreObjectThread{ - CoreObjectThread( - change__ids: "1cec1fe9-fcc4-4f5b-af30-9d661de65bd8" - object_path__value: "data/17a53fbd-e27b-784b-393c-c5127a1b1be3" - ) { - count - edges { - node { - __typename - id - display_label - resolved { - value - } - created_by { - node { - display_label - } - } - comments { - count - edges { - node { - id - display_label - created_by { - node { - display_label - } - } - created_at { - value - } - text { - value - } - } - } - } - } - } - } -}`; - -export const createThreadMockData = { - data: { - CoreObjectThreadCreate: { - object: { - id: "17a55dee-acc3-3df9-2d04-c51c5bd2161d", - display_label: "CoreObjectThread(ID: 17a55dee-acc3-3df9-2d04-c51c5bd2161d)", - __typename: "CoreObjectThread", - }, - ok: true, - __typename: "CoreObjectThreadCreate", - }, - }, -}; - -export const getProposedChangesCommentsMockData = { - data: { - CoreObjectThread: { - count: 1, - edges: [ - { - node: { - __typename: "CoreObjectThread", - id: "17a55fb5-7d2f-f712-2d0e-c5154fe1bf17", - display_label: 
"CoreObjectThread(ID: 17a55fb5-7d2f-f712-2d0e-c5154fe1bf17)", - resolved: { - value: false, - __typename: "CheckboxAttribute", - }, - created_by: { - node: null, - __typename: "NestedEdgedCoreAccount", - }, - comments: { - count: 1, - edges: [ - { - node: { - id: "17a55fb5-82a1-197c-2d0e-c512237d7fa5", - display_label: "new comment", - created_by: { - node: { - display_label: "Admin", - __typename: "CoreAccount", - }, - __typename: "NestedEdgedCoreAccount", - }, - created_at: { - value: "2023-12-29T19:07:47+01:00", - __typename: "TextAttribute", - }, - text: { - value: "new comment", - __typename: "TextAttribute", - }, - __typename: "CoreThreadComment", - }, - __typename: "NestedEdgedCoreThreadComment", - }, - ], - __typename: "NestedPaginatedCoreThreadComment", - }, - }, - __typename: "EdgedCoreObjectThread", - }, - ], - __typename: "PaginatedCoreObjectThread", - }, - }, -}; - -export const createThreadCommentMockQuery = `mutation CoreThreadCommentCreate { - CoreThreadCommentCreate( - data: {text: {value: "new reply"}, thread: {id: "17a55fb5-7d2f-f712-2d0e-c5154fe1bf17"}, created_by: {id: "d07bb58e-8394-4053-a198-9cef84e7d6c0"}, created_at: {value: "2023-12-24T12:24:36+01:00"}} - ) { - object { - id - display_label - __typename - } - ok - __typename - } -}`; diff --git a/frontend/app/tests/mocks/data/devices.ts b/frontend/app/tests/mocks/data/devices.ts index 48415dc140..95648b5eb5 100644 --- a/frontend/app/tests/mocks/data/devices.ts +++ b/frontend/app/tests/mocks/data/devices.ts @@ -1761,10 +1761,10 @@ export const getPermissionsData = { { node: { kind: "InfraDevice", - view: "ALLOW_ALL", - create: "ALLOW_ALL", - update: "ALLOW_ALL", - delete: "ALLOW_ALL", + view: "ALLOW", + create: "ALLOW", + update: "ALLOW", + delete: "ALLOW", __typename: "ObjectPermission", }, __typename: "ObjectPermissionNode", diff --git a/frontend/app/tests/mocks/data/generics.ts b/frontend/app/tests/mocks/data/generics.ts deleted file mode 100644 index 582065a6be..0000000000 --- 
a/frontend/app/tests/mocks/data/generics.ts +++ /dev/null @@ -1,257 +0,0 @@ -import { iGenericSchema } from "../../../src/state/atoms/schema.atom"; -import { deviceDetailsMocksGenerics } from "./devices"; - -export const genericsMocks: iGenericSchema[] = [ - { - id: "da5ab3da-621b-4e90-b543-f6f8eb9860a2", - name: "Endpoint", - namespace: "Infra", - description: undefined, - default_filter: undefined, - order_by: undefined, - display_labels: undefined, - attributes: [], - relationships: [ - { - id: "41b42400-daf4-4d07-a270-bde920061c01", - name: "connected_endpoint", - peer: "InfraEndpoint", - kind: "Attribute", - label: "Connected Endpoint", - description: undefined, - identifier: "connected__endpoint", - inherited: false, - cardinality: "one", - branch: true, - optional: true, - filters: [], - order_weight: 1000, - }, - ], - branch: true, - label: "Endpoint", - used_by: ["InfraCircuitEndpoint", "InfraInterfaceL2", "InfraInterfaceL3"], - kind: "InfraEndpoint", - }, - { - id: "76c153b2-d580-43ec-b4de-50ec67ee1881", - name: "Group", - namespace: "Core", - description: undefined, - default_filter: "name__value", - order_by: ["name__value"], - display_labels: ["label__value"], - attributes: [ - { - id: "001e93e4-b02a-4b3a-ba0c-626915395358", - name: "name", - kind: "Text", - namespace: "Attribute", - label: "Name", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: true, - branch: true, - optional: false, - order_weight: 1000, - }, - { - id: "93a01cde-b121-43ec-ad66-92aa868e6c0d", - name: "label", - kind: "Text", - namespace: "Attribute", - label: "Label", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 2000, - }, - { - id: "58d1777f-13bc-4937-a3ca-7d889fd7def4", - name: 
"description", - kind: "Text", - namespace: "Attribute", - label: "Description", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 3000, - }, - ], - relationships: [ - { - id: "7fdb61ff-95d4-40d0-bb6a-8e6a87bb77db", - name: "members", - peer: "CoreNode", - kind: "Generic", - label: "Members", - description: undefined, - identifier: "group_member", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [], - order_weight: 4000, - }, - { - id: "f3b41911-e110-4cf7-b7a9-6708702d0764", - name: "subscribers", - peer: "CoreNode", - kind: "Generic", - label: "Subscribers", - description: undefined, - identifier: "group_subscriber", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [], - order_weight: 5000, - }, - ], - branch: true, - label: "Group", - used_by: ["CoreStandardGroup"], - kind: "CoreGroup", - }, - deviceDetailsMocksGenerics[0], - { - id: "f069caa4-0f8b-425b-bc18-ef57d69e1ce2", - name: "Node", - namespace: "Core", - description: undefined, - default_filter: undefined, - order_by: undefined, - display_labels: undefined, - attributes: [], - relationships: [], - branch: true, - label: "Node", - used_by: [], - kind: "CoreNode", - }, - { - id: "757c1d52-ba8a-4686-9e2d-f09ab41336c6", - name: "Owner", - namespace: "Lineage", - description: undefined, - default_filter: undefined, - order_by: undefined, - display_labels: ["name__value"], - attributes: [ - { - id: "2f4d3921-3ea6-43af-bb5c-45b7019c32ba", - name: "name", - kind: "Text", - namespace: "Attribute", - label: "Name", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: true, - branch: true, - optional: false, - order_weight: 1000, - }, - { - id: 
"b2aa80d8-2a9f-4493-a7aa-e6f4689a21be", - name: "description", - kind: "Text", - namespace: "Attribute", - label: "Description", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 2000, - }, - ], - relationships: [], - branch: true, - label: "Owner", - used_by: ["CoreAccount", "CoreRepository"], - kind: "LineageOwner", - }, - { - id: "1260ab98-5d35-4479-96dd-3d7ad261a19c", - name: "Source", - namespace: "Lineage", - description: "Any Entities that stores or produces data.", - default_filter: undefined, - order_by: undefined, - display_labels: ["name__value"], - attributes: [ - { - id: "3fa53678-7023-4413-8cb5-e9bae1ef7fe0", - name: "name", - kind: "Text", - namespace: "Attribute", - label: "Name", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: true, - branch: true, - optional: false, - order_weight: 1000, - }, - { - id: "739d1b9e-6605-498d-b2c3-cb5aaba39789", - name: "description", - kind: "Text", - namespace: "Attribute", - label: "Description", - description: undefined, - default_value: undefined, - enum: undefined, - regex: undefined, - max_length: undefined, - min_length: undefined, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 2000, - }, - ], - relationships: [], - branch: true, - label: "Source", - used_by: ["CoreAccount", "CoreRepository"], - kind: "LineageSource", - }, -]; diff --git a/frontend/app/tests/mocks/data/ip-address.ts b/frontend/app/tests/mocks/data/ip-address.ts index 905db8e082..321774a595 100644 --- a/frontend/app/tests/mocks/data/ip-address.ts +++ b/frontend/app/tests/mocks/data/ip-address.ts @@ -1,1232 +1,3 @@ -export const ipAddressMocksSchema = { - id: "17d341e9-2da6-9005-2e36-c51e869c589a", - state: 
"present", - name: "IPAddress", - namespace: "Builtin", - description: "IPv6 or IPv4 address", - label: "IP Address", - branch: "aware", - default_filter: "address__value", - human_friendly_id: null, - display_labels: ["address__value"], - include_in_menu: false, - menu_placement: null, - icon: "mdi:ip-outline", - order_by: ["address__version", "address__binary_address"], - uniqueness_constraints: null, - documentation: null, - filters: [ - { - id: null, - state: "present", - name: "ids", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "any__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "any__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "any__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "any__source__id", - kind: "Text", - enum: null, - object_kind: null, - 
description: null, - }, - { - id: null, - state: "present", - name: "any__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - attributes: [ - { - id: "17d341e9-3642-67c7-2e3a-c51a0386c4b6", - state: "present", - name: "address", - kind: "IPHost", - enum: null, - choices: null, - regex: null, - max_length: null, - min_length: null, - label: "Address", - description: null, - read_only: false, - unique: false, - optional: false, - branch: "aware", - order_weight: 1000, - default_value: null, - inherited: false, - allow_override: "any", - }, - { - id: "17d341e9-52aa-5f50-2e3e-c51f833e62e1", - state: "present", - name: "description", - kind: "Text", - enum: null, - choices: null, - regex: null, - max_length: null, - min_length: null, - label: "Description", - description: null, - read_only: false, - unique: false, - optional: true, - branch: "aware", - order_weight: 2000, - default_value: null, - inherited: false, - allow_override: "any", - }, - ], - relationships: [ - { - id: "17d341e9-5367-30e1-2e3b-c513fa8e87a6", - state: "present", - name: "ip_namespace", - peer: "BuiltinIPNamespace", - kind: "Generic", - label: "IP Namespace", - description: null, - identifier: "ip_namespace__ip_address", - cardinality: "one", - min_count: 0, - max_count: 1, - order_weight: 3000, - optional: true, - branch: "aware", - inherited: false, - direction: "bidirectional", - hierarchical: null, - filters: [ - { - id: null, - state: "present", - name: "ids", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { 
- id: null, - state: "present", - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - on_delete: "no-action", - allow_override: "none", - read_only: false, - }, - { - id: "17d341e9-5413-cb14-2e3c-c51f14a85af6", - state: "present", - name: "ip_prefix", - peer: "BuiltinIPPrefix", - kind: "Generic", - label: "IP Prefix", - description: null, - identifier: "ip_prefix__ip_address", - cardinality: "one", - min_count: 0, - max_count: 1, - order_weight: 4000, - optional: true, - branch: "aware", - inherited: false, - direction: "bidirectional", - hierarchical: null, - filters: [ - { - id: null, - state: "present", - name: "ids", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: 
"description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "member_type__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "member_type__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "member_type__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "member_type__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "member_type__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "member_type__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_pool__value", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_pool__values", - kind: "Text", - enum: null, - object_kind: null, - description: 
null, - }, - { - id: null, - state: "present", - name: "is_pool__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_pool__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_pool__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_pool__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_top_level__value", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_top_level__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_top_level__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_top_level__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_top_level__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "is_top_level__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "utilization__value", - kind: "Number", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "utilization__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "utilization__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: 
"utilization__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "utilization__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "utilization__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "netmask__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "netmask__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "netmask__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "netmask__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "netmask__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "netmask__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "hostmask__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "hostmask__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "hostmask__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "hostmask__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "hostmask__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: 
null, - state: "present", - name: "hostmask__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "network_address__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "network_address__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "network_address__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "network_address__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "network_address__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "network_address__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "broadcast_address__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "broadcast_address__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "broadcast_address__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "broadcast_address__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "broadcast_address__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "broadcast_address__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - on_delete: "no-action", - 
allow_override: "none", - read_only: true, - }, - { - id: "17d341e9-54cd-423e-2e3f-c513c6025372", - state: "present", - name: "member_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Member Of Groups", - description: null, - identifier: "group_member", - cardinality: "many", - min_count: 0, - max_count: 0, - order_weight: 5000, - optional: true, - branch: "aware", - inherited: false, - direction: "bidirectional", - hierarchical: null, - filters: [ - { - id: null, - state: "present", - name: "ids", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - 
}, - { - id: null, - state: "present", - name: "label__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - on_delete: "no-action", - allow_override: "any", - read_only: false, - }, - { - id: "17d341e9-5585-ae7c-2e3f-c515f713fd81", - state: "present", - name: "subscriber_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Subscriber Of Groups", - description: null, - identifier: "group_subscriber", - cardinality: "many", - min_count: 0, - max_count: 0, - order_weight: 6000, - optional: true, - branch: "aware", - inherited: false, - direction: "bidirectional", - hierarchical: null, - filters: [ - { - id: null, - state: "present", - name: "ids", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: 
"name__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "name__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "label__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__values", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__is_visible", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: 
"description__is_protected", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__source__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - { - id: null, - state: "present", - name: "description__owner__id", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - on_delete: "no-action", - allow_override: "any", - read_only: false, - }, - ], - hierarchical: false, - used_by: ["IpamIPAddress"], - kind: "BuiltinIPAddress", - hash: "925227d93c202e6c3348bbc466634deb", -}; - export const ipamIpAddressMocksSchema = { id: "17d341ee-6777-7f5e-2e3f-c51f119691e0", state: "present", diff --git a/frontend/app/tests/mocks/data/permissions.ts b/frontend/app/tests/mocks/data/permissions.ts index 05b2772646..f4a2bc33f6 100644 --- a/frontend/app/tests/mocks/data/permissions.ts +++ b/frontend/app/tests/mocks/data/permissions.ts @@ -3,10 +3,10 @@ export const permissionsAllow = { { node: { kind: "InfraDevice", - view: "ALLOW_ALL", - create: "ALLOW_ALL", - update: "ALLOW_ALL", - delete: "ALLOW_ALL", + view: "ALLOW", + create: "ALLOW", + update: "ALLOW", + delete: "ALLOW", __typename: "ObjectPermission", }, __typename: "ObjectPermissionNode", diff --git a/frontend/app/tests/mocks/data/task.ts b/frontend/app/tests/mocks/data/task.ts deleted file mode 100644 index 80468d28b7..0000000000 --- a/frontend/app/tests/mocks/data/task.ts +++ /dev/null @@ -1,195 +0,0 @@ -import { permissionsAllow } from "./permissions"; - -export const taskMocksSchema = [ - { - id: "8a4e2579-c300-48e1-b703-022bf6d224df", - name: "Task", - namespace: "Test", - description: "Issue tracker", - default_filter: "name__value", - order_by: ["name__value"], - display_labels: ["name__value"], - attributes: [ - { - id: "30d6f53c-7c97-473e-b0cb-b8f1e1d02f2e", - name: "name", - kind: "Text", - namespace: "Attribute", - label: "Name", - description: null, - default_value: null, - 
enum: null, - regex: null, - max_length: null, - min_length: null, - inherited: false, - unique: true, - branch: true, - optional: false, - order_weight: 1000, - }, - { - id: "ad1a60f6-efce-445b-9995-b760a6f73f8c", - name: "description", - kind: "TextArea", - namespace: "Attribute", - label: "Description", - description: null, - default_value: null, - enum: null, - regex: null, - max_length: null, - min_length: null, - inherited: false, - unique: false, - branch: true, - optional: true, - order_weight: 2000, - }, - { - id: "374125ad-4bfe-49ac-b6d2-059b7bba19ce", - name: "completed", - kind: "Boolean", - namespace: "Attribute", - label: "Completed", - description: null, - default_value: false, - enum: null, - regex: null, - max_length: null, - min_length: null, - inherited: false, - unique: false, - branch: true, - optional: false, - order_weight: 3000, - }, - ], - relationships: [ - { - id: "60872203-876d-4ac2-82fe-ff31394f0578", - name: "member_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Member Of Groups", - description: null, - identifier: "group_member", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [ - { name: "id", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "label__value", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - order_weight: 4000, - }, - { - id: "1e8d92b9-e101-4445-9b47-92fa17d5f15d", - name: "subscriber_of_groups", - peer: "CoreGroup", - kind: "Group", - label: "Subscriber Of Groups", - description: null, - identifier: "group_subscriber", - inherited: false, - cardinality: "many", - branch: true, - optional: true, - filters: [ - { name: "id", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: 
"Text", enum: null, object_kind: null, description: null }, - { name: "label__value", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "description__value", - kind: "Text", - enum: null, - object_kind: null, - description: null, - }, - ], - order_weight: 5000, - }, - ], - label: "Task", - inherit_from: [], - groups: [], - branch: true, - filters: [ - { name: "ids", kind: "Text", enum: null, object_kind: null, description: null }, - { name: "name__value", kind: "Text", enum: null, object_kind: null, description: null }, - { - name: "completed__value", - kind: "Boolean", - enum: null, - object_kind: null, - description: null, - }, - ], - kind: "TestTask", - }, -]; - -export const taskMocksQuery = ` -query TestTask { - TestTask(offset: 0,limit: 10) { - count - edges { - node { - id - display_label - __typename - name { - value - } - description { - value - } - completed { - value - } - } - } - permissions { - edges { - node { - kind - view - create - update - delete - } - } - } - } -} -`; - -export const taskMocksData = { - TestTask: { - count: 1, - edges: [ - { - node: { - id: "c5b3043d-bb86-46ac-8790-227a61de3305", - display_label: "aze", - __typename: "TestTask", - name: { value: "aze", __typename: "TextAttribute" }, - description: { value: null, __typename: "TextAttribute" }, - completed: { value: false, __typename: "CheckboxAttribute" }, - }, - __typename: "EdgedTestTask", - }, - ], - permissions: permissionsAllow, - __typename: "PaginatedTestTask", - }, -}; diff --git a/frontend/app/tests/mocks/e2e/accounts.ts b/frontend/app/tests/mocks/e2e/accounts.ts deleted file mode 100644 index 4758a53b8c..0000000000 --- a/frontend/app/tests/mocks/e2e/accounts.ts +++ /dev/null @@ -1,9 +0,0 @@ -export const MAIN_BRANCH_NAME = "main"; -export const NEW_BRANCH_NAME = "cr1234"; -export const ADMIN_ACCOUNT_NAME = "admin"; -export const ADMIN_ACCOUNT_LABEL = "Admin"; -export const NEW_ADMIN_ACCOUNT_LABEL = "Administrator"; -export const NEW_ACCOUNT = 
{ - name: "New Account", - password: "test", -}; diff --git a/frontend/app/tests/mocks/e2e/artifacts.ts b/frontend/app/tests/mocks/e2e/artifacts.ts deleted file mode 100644 index 9114eb22c5..0000000000 --- a/frontend/app/tests/mocks/e2e/artifacts.ts +++ /dev/null @@ -1,2 +0,0 @@ -export const ARTIFACT_DEFINITION_NAME = "startup-config"; -export const ARTIFACT_DEFINITION_FULL_NAME = "Startup Config for Edge devices"; diff --git a/frontend/app/tests/mocks/e2e/organizations.ts b/frontend/app/tests/mocks/e2e/organizations.ts deleted file mode 100644 index 56519278d8..0000000000 --- a/frontend/app/tests/mocks/e2e/organizations.ts +++ /dev/null @@ -1,5 +0,0 @@ -export const MAIN_BRANCH_NAME = "main"; -export const NEW_BRANCH_NAME = "cr1234"; -export const ORGANIZATION_NAME = "my-first-org"; -export const ORGANIZATION_DESCRIPTION = "Testing Infrahub"; -export const NEW_ORGANIZATION_DESCRIPTION = "Changes from branch cr1234"; diff --git a/frontend/app/tests/mocks/e2e/proposed-changes.ts b/frontend/app/tests/mocks/e2e/proposed-changes.ts deleted file mode 100644 index 7489511e82..0000000000 --- a/frontend/app/tests/mocks/e2e/proposed-changes.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const PROPOSED_CHANGES_NAME = "New changes"; -export const PROPOSED_CHANGES_BRANCH = "ord1-add-upstream"; -export const PROPOSED_CHANGES_BRANCH_CONFLICT = "platform-conflict"; -export const PROPOSED_CHANGE_COMMENT_1 = "First comment here, a new thread will be created"; -export const PROPOSED_CHANGE_COMMENT_2 = "Second comment here, to reply to the first one"; -export const PROPOSED_CHANGE_COMMENT_3 = "Last comment to close this wonderful thread"; diff --git a/frontend/app/tests/mocks/e2e/validators.ts b/frontend/app/tests/mocks/e2e/validators.ts deleted file mode 100644 index 38538ea055..0000000000 --- a/frontend/app/tests/mocks/e2e/validators.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const PROPOSED_CHANGES_NAME_FAIL = "New changes with failed validators"; - -export const 
PROPOSED_CHANGES_NAME_WARNING = "New changes with warning"; - -export const PROPOSED_CHANGES_BRANCH_CLEAN = "ord1-add-upstream"; - -export const PROPOSED_CHANGES_BRANCH_CONFLICT = "platform-conflict"; diff --git a/frontend/app/tests/unit/components/form/utils/getFormFieldsFromSchema.test.ts b/frontend/app/tests/unit/components/form/utils/getFormFieldsFromSchema.test.ts index 60f8ca6a88..e50bac0fea 100644 --- a/frontend/app/tests/unit/components/form/utils/getFormFieldsFromSchema.test.ts +++ b/frontend/app/tests/unit/components/form/utils/getFormFieldsFromSchema.test.ts @@ -2,6 +2,8 @@ import { getFormFieldsFromSchema } from "@/components/form/utils/getFormFieldsFr import { AuthContextType } from "@/hooks/useAuth"; import { components } from "@/infraops"; import { RelationshipSchema } from "@/screens/schema/types"; +import { store } from "@/state"; +import { currentBranchAtom } from "@/state/atoms/branches.atom"; import { IModelSchema } from "@/state/atoms/schema.atom"; import { AttributeType } from "@/utils/getObjectItemDisplayValue"; import { describe, expect, it } from "vitest"; @@ -441,4 +443,338 @@ describe("getFormFieldsFromSchema", () => { }, }); }); + + it("should disable a field if permission is DENY", () => { + // GIVEN + const schema = { + attributes: [buildAttributeSchema()], + } as IModelSchema; + + const initialObject: { field1: Partial } = { + field1: { + is_from_profile: false, + is_protected: true, + is_visible: true, + owner: { + id: "17dd42a7-d547-60af-3111-c51b4b2fc72e", + display_label: "Architecture Team", + }, + permissions: { + update_value: "DENY", + }, + source: null, + updated_at: "2024-07-15T09:32:01.363787+00:00", + value: "test-value", + __typename: "TextAttribute", + }, + }; + + // WHEN + const fields = getFormFieldsFromSchema({ schema, initialObject }); + + // THEN + expect(fields.length).to.equal(1); + expect(fields[0]).toEqual({ + defaultValue: { source: { type: "user" }, value: "test-value" }, + description: undefined, + disabled: 
true, + name: "field1", + label: "Field 1", + type: "Text", + unique: false, + rules: { + required: false, + validate: { + required: expect.any(Function), + }, + }, + }); + }); + + it("should enable a field if permission is ALLOW_ALL", () => { + // GIVEN + const schema = { + attributes: [buildAttributeSchema()], + } as IModelSchema; + + const initialObject: { field1: Partial } = { + field1: { + is_from_profile: false, + is_protected: true, + is_visible: true, + owner: { + id: "17dd42a7-d547-60af-3111-c51b4b2fc72e", + display_label: "Architecture Team", + }, + permissions: { + update_value: "ALLOW", + }, + source: null, + updated_at: "2024-07-15T09:32:01.363787+00:00", + value: "test-value", + __typename: "TextAttribute", + }, + }; + + // WHEN + const fields = getFormFieldsFromSchema({ schema, initialObject }); + + // THEN + expect(fields.length).to.equal(1); + expect(fields[0]).toEqual({ + defaultValue: { source: { type: "user" }, value: "test-value" }, + description: undefined, + disabled: false, + name: "field1", + label: "Field 1", + type: "Text", + unique: false, + rules: { + required: false, + validate: { + required: expect.any(Function), + }, + }, + }); + }); + + it("should enable a field if permission is ALLOW_DEFAULT and current branch is default", () => { + // GIVEN + const schema = { + attributes: [buildAttributeSchema()], + } as IModelSchema; + + const initialObject: { field1: Partial } = { + field1: { + is_from_profile: false, + is_protected: true, + is_visible: true, + owner: { + id: "17dd42a7-d547-60af-3111-c51b4b2fc72e", + display_label: "Architecture Team", + }, + permissions: { + update_value: "ALLOW_DEFAULT", + }, + source: null, + updated_at: "2024-07-15T09:32:01.363787+00:00", + value: "test-value", + __typename: "TextAttribute", + }, + }; + + store.set(currentBranchAtom, { + id: "18007869-b812-f080-2d60-c51d9e906226", + name: "mainnn", + description: "Default Branch", + origin_branch: "main", + branched_from: "2024-10-21T12:44:12.365354Z", + 
created_at: "2024-10-21T12:44:12.365371Z", + sync_with_git: true, + is_default: true, + has_schema_changes: false, + __typename: "Branch", + }); + + // WHEN + const fields = getFormFieldsFromSchema({ schema, initialObject }); + + // THEN + expect(fields.length).to.equal(1); + expect(fields[0]).toEqual({ + defaultValue: { source: { type: "user" }, value: "test-value" }, + description: undefined, + disabled: false, + name: "field1", + label: "Field 1", + type: "Text", + unique: false, + rules: { + required: false, + validate: { + required: expect.any(Function), + }, + }, + }); + }); + + it("should disable a field if permission is ALLOW_DEFAULT and current branch is not default", () => { + // GIVEN + const schema = { + attributes: [buildAttributeSchema()], + } as IModelSchema; + + const initialObject: { field1: Partial } = { + field1: { + is_from_profile: false, + is_protected: true, + is_visible: true, + owner: { + id: "17dd42a7-d547-60af-3111-c51b4b2fc72e", + display_label: "Architecture Team", + }, + permissions: { + update_value: "ALLOW_DEFAULT", + }, + source: null, + updated_at: "2024-07-15T09:32:01.363787+00:00", + value: "test-value", + __typename: "TextAttribute", + }, + }; + + store.set(currentBranchAtom, { + id: "18007869-b812-f080-2d60-c51d9e906226", + name: "other", + description: "other Branch", + origin_branch: "main", + branched_from: "2024-10-21T12:44:12.365354Z", + created_at: "2024-10-21T12:44:12.365371Z", + sync_with_git: true, + is_default: false, + has_schema_changes: false, + __typename: "Branch", + }); + + // WHEN + const fields = getFormFieldsFromSchema({ schema, initialObject }); + + // THEN + expect(fields.length).to.equal(1); + expect(fields[0]).toEqual({ + defaultValue: { source: { type: "user" }, value: "test-value" }, + description: undefined, + disabled: true, + name: "field1", + label: "Field 1", + type: "Text", + unique: false, + rules: { + required: false, + validate: { + required: expect.any(Function), + }, + }, + }); + }); + + 
it("should disable a field if permission is ALLOW_OTHER and current branch is default", () => { + // GIVEN + const schema = { + attributes: [buildAttributeSchema()], + } as IModelSchema; + + const initialObject: { field1: Partial } = { + field1: { + is_from_profile: false, + is_protected: true, + is_visible: true, + owner: { + id: "17dd42a7-d547-60af-3111-c51b4b2fc72e", + display_label: "Architecture Team", + }, + permissions: { + update_value: "ALLOW_OTHER", + }, + source: null, + updated_at: "2024-07-15T09:32:01.363787+00:00", + value: "test-value", + __typename: "TextAttribute", + }, + }; + + store.set(currentBranchAtom, { + id: "18007869-b812-f080-2d60-c51d9e906226", + name: "main", + description: "Default Branch", + origin_branch: "main", + branched_from: "2024-10-21T12:44:12.365354Z", + created_at: "2024-10-21T12:44:12.365371Z", + sync_with_git: true, + is_default: true, + has_schema_changes: false, + __typename: "Branch", + }); + + // WHEN + const fields = getFormFieldsFromSchema({ schema, initialObject }); + + // THEN + expect(fields.length).to.equal(1); + expect(fields[0]).toEqual({ + defaultValue: { source: { type: "user" }, value: "test-value" }, + description: undefined, + disabled: true, + name: "field1", + label: "Field 1", + type: "Text", + unique: false, + rules: { + required: false, + validate: { + required: expect.any(Function), + }, + }, + }); + }); + + it("should disable a field if permission is ALLOW_OTHER and current branch is not default", () => { + // GIVEN + const schema = { + attributes: [buildAttributeSchema()], + } as IModelSchema; + + const initialObject: { field1: Partial } = { + field1: { + is_from_profile: false, + is_protected: true, + is_visible: true, + owner: { + id: "17dd42a7-d547-60af-3111-c51b4b2fc72e", + display_label: "Architecture Team", + }, + permissions: { + update_value: "ALLOW_OTHER", + }, + source: null, + updated_at: "2024-07-15T09:32:01.363787+00:00", + value: "test-value", + __typename: "TextAttribute", + }, + }; + 
+ store.set(currentBranchAtom, { + id: "18007869-b812-f080-2d60-c51d9e906226", + name: "other", + description: "other Branch", + origin_branch: "main", + branched_from: "2024-10-21T12:44:12.365354Z", + created_at: "2024-10-21T12:44:12.365371Z", + sync_with_git: true, + is_default: false, + has_schema_changes: false, + __typename: "Branch", + }); + + // WHEN + const fields = getFormFieldsFromSchema({ schema, initialObject }); + + // THEN + expect(fields.length).to.equal(1); + expect(fields[0]).toEqual({ + defaultValue: { source: { type: "user" }, value: "test-value" }, + description: undefined, + disabled: false, + name: "field1", + label: "Field 1", + type: "Text", + unique: false, + rules: { + required: false, + validate: { + required: expect.any(Function), + }, + }, + }); + }); }); diff --git a/frontend/app/tests/unit/components/form/utils/getRelationshipDefaultValue.test.ts b/frontend/app/tests/unit/components/form/utils/getRelationshipDefaultValue.test.ts index 90b19c27b5..4e097063ae 100644 --- a/frontend/app/tests/unit/components/form/utils/getRelationshipDefaultValue.test.ts +++ b/frontend/app/tests/unit/components/form/utils/getRelationshipDefaultValue.test.ts @@ -1,7 +1,9 @@ import { getRelationshipDefaultValue } from "@/components/form/utils/getRelationshipDefaultValue"; import { RESOURCE_GENERIC_KIND } from "@/screens/resource-manager/constants"; +import { store } from "@/state"; +import { iNodeSchema, schemaState } from "@/state/atoms/schema.atom"; import { RelationshipManyType, RelationshipOneType } from "@/utils/getObjectItemDisplayValue"; -import { describe, expect, vi } from "vitest"; +import { describe, expect } from "vitest"; const buildRelationshipOneData = (override: Partial): RelationshipOneType => ({ node: { @@ -53,12 +55,9 @@ describe("getRelationshipDefaultValue", () => { it("returns relationship from pool", () => { // GIVEN - vi.mock("jotai", () => ({ - atom: vi.fn(), - createStore: () => ({ - get: () => [{ kind: "FakeResourcePool", 
inherit_from: [RESOURCE_GENERIC_KIND] }], - }), - })); + store.set(schemaState, [ + { kind: "FakeResourcePool", inherit_from: [RESOURCE_GENERIC_KIND] } as iNodeSchema, + ]); const relationshipData = buildRelationshipOneData({ properties: { diff --git a/frontend/app/tests/unit/components/form/utils/getUpdateMutationFromFormData.test.ts b/frontend/app/tests/unit/components/form/utils/getUpdateMutationFromFormData.test.ts index bd70c27673..6ff06524b6 100644 --- a/frontend/app/tests/unit/components/form/utils/getUpdateMutationFromFormData.test.ts +++ b/frontend/app/tests/unit/components/form/utils/getUpdateMutationFromFormData.test.ts @@ -1,7 +1,7 @@ import { DynamicFieldProps, FormAttributeValue, - RelationshipValueFormPool, + RelationshipValueFromPool, } from "@/components/form/type"; import { getUpdateMutationFromFormData } from "@/components/form/utils/mutations/getUpdateMutationFromFormData"; import { describe, expect } from "vitest"; @@ -111,7 +111,7 @@ describe("getUpdateMutationFromFormData - test", () => { defaultValue: { source: { type: "user" }, value: { id: "value1" } }, }), ]; - const formData: Record = { + const formData: Record = { field1: { source: { type: "pool", diff --git a/frontend/app/tests/unit/components/form/utils/isFieldDisabled.test.ts b/frontend/app/tests/unit/components/form/utils/isFieldDisabled.test.ts index 952420bb82..d958578d9c 100644 --- a/frontend/app/tests/unit/components/form/utils/isFieldDisabled.test.ts +++ b/frontend/app/tests/unit/components/form/utils/isFieldDisabled.test.ts @@ -81,20 +81,4 @@ describe("isFieldDisabled", () => { // THEN expect(disabled).to.equal(false); }); - - it("returns false if the auth is an admin", () => { - // GIVEN - const params: IsFieldDisabledParams = { - isProtected: true, - isReadOnly: false, - owner: { id: "not-admin" }, - auth: { permissions: { isAdmin: true } }, - }; - - // WHEN - const disabled = isFieldDisabled(params); - - // THEN - expect(disabled).to.equal(false); - }); }); diff --git 
a/frontend/app/tests/unit/data/peerDropdownOptionsFromApi.ts b/frontend/app/tests/unit/data/peerDropdownOptionsFromApi.ts deleted file mode 100644 index 90da436cf4..0000000000 --- a/frontend/app/tests/unit/data/peerDropdownOptionsFromApi.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { iPeerDropdownOptions } from "@/utils/dropdownOptionsForRelatedPeers"; - -export const C_PEER_DROPDOWN_OPTIONS: iPeerDropdownOptions = { - account: [ - { - id: "8e76ac89-45d5-43ea-9949-079261fc3590", - display_label: "David Palmer", - }, - { - id: "158b48a7-b1e0-4ef5-8aca-d1cfa67c1b7d", - display_label: "Chloe O'Brian", - }, - { - id: "9c37e51a-559e-473b-9d0e-88829968e25d", - display_label: "Jack Bauer", - }, - { - id: "e5b3354e-d062-4663-a891-84b3479c44b1", - display_label: "Crm Synchronization", - }, - { - id: "9660634f-17c2-422a-8b96-e3a618b06e10", - display_label: "Pop-Builder", - }, - { - id: "402144d7-70dc-456f-bc59-9ea0159decea", - display_label: "Admin", - }, - ], - group: [ - { - id: "fdad5ffe-3c12-4417-9131-73f35e1bfeee", - display_label: "Architecture Team", - }, - { - id: "0831f2dd-4138-4b00-9fdc-c366abee2bd0", - display_label: "Engineering Team", - }, - { - id: "389d85ef-0159-42d3-9c52-b587b80e48d9", - display_label: "Operation Team", - }, - { - id: "4f95f402-5a8c-45b5-b039-bb3dd6ffefab", - display_label: "Admin", - }, - ], - repository: [], -}; diff --git a/frontend/app/tests/unit/data/schemaKindNameMap.ts b/frontend/app/tests/unit/data/schemaKindNameMap.ts deleted file mode 100644 index 4db9fc6fec..0000000000 --- a/frontend/app/tests/unit/data/schemaKindNameMap.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { iSchemaKindNameMap } from "../../../src/state/atoms/schemaKindName.atom"; - -export const C_SchemaKindNameMap: iSchemaKindNameMap = { - Account: "account", - account: "Account", - Group: "group", - group: "Group", - Repository: "repository", - repository: "Repository", -}; diff --git a/helm/Chart.yaml b/helm/Chart.yaml index 99fbe4d9df..670a5277ab 100644 --- 
a/helm/Chart.yaml +++ b/helm/Chart.yaml @@ -15,12 +15,12 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 2.8.0 +version: 3.0.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "1.0.0-dev0" +appVersion: "1.0.0" dependencies: - name: neo4j diff --git a/helm/values.yaml b/helm/values.yaml index a705118a9e..7af95650d3 100644 --- a/helm/values.yaml +++ b/helm/values.yaml @@ -151,7 +151,7 @@ infrahubDemoData: command: - sh - -c - - "infrahubctl schema load models/base --wait 30 && infrahubctl run models/infrastructure_edge.py && infrahubctl repository add demo-edge https://github.com/opsmill/infrahub-demo-edge --read-only" + - "infrahubctl schema load models/base --wait 30 && infrahubctl run models/infrastructure_edge.py && infrahubctl menu load models/base_menu.yml && infrahubctl repository add demo-edge https://github.com/opsmill/infrahub-demo-edge --read-only" # ----------- Mesage Queue (Rabbit MQ) ----------- rabbitmq: diff --git a/models/base/dcim.yml b/models/base/dcim.yml index 58fd7e370f..b4cf2aa222 100644 --- a/models/base/dcim.yml +++ b/models/base/dcim.yml @@ -100,6 +100,7 @@ generics: identifier: "device__interface" optional: false cardinality: one + order_weight: 1 kind: Parent - name: tags peer: BuiltinTag @@ -157,6 +158,7 @@ generics: relationships: - name: mlag_domain label: MLAG Domain + order_weight: 1 peer: InfraMlagDomain kind: Attribute cardinality: one @@ -240,6 +242,7 @@ nodes: optional: false cardinality: one kind: Attribute + order_weight: 1 identifier: 
"site__devices" - name: interfaces peer: InfraInterface @@ -474,12 +477,14 @@ nodes: - name: circuit_id kind: Text unique: true + order_weight: 2 - name: description kind: Text optional: true - name: vendor_id kind: Text optional: true + order_weight: 3 - name: status kind: Dropdown choices: @@ -521,6 +526,7 @@ nodes: optional: false cardinality: one kind: Attribute + order_weight: 1 - name: endpoints peer: InfraCircuitEndpoint optional: true diff --git a/models/base/ipam.yml b/models/base/ipam.yml index 78befdbb16..929c9c51a3 100644 --- a/models/base/ipam.yml +++ b/models/base/ipam.yml @@ -39,11 +39,13 @@ nodes: - name: name kind: Text unique: true + order_weight: 2 - name: description kind: Text optional: true - name: vlan_id kind: Number + order_weight: 3 - name: status kind: Dropdown choices: @@ -86,6 +88,7 @@ nodes: cardinality: one kind: Attribute identifier: "site__vlans" + order_weight: 1 - name: gateway label: L3 Gateway peer: InfraInterfaceL3 diff --git a/models/base/routing.yml b/models/base/routing.yml index 064b7bd70c..ba9a720d25 100644 --- a/models/base/routing.yml +++ b/models/base/routing.yml @@ -18,9 +18,11 @@ nodes: - name: name kind: Text unique: true + order_weight: 1 - name: asn kind: Number unique: true + order_weight: 2 - name: description kind: Text optional: true @@ -30,6 +32,7 @@ nodes: optional: false cardinality: one kind: Attribute + order_weight: 3 - name: BGPPeerGroup namespace: Infra description: "A BGP Peer Group is used to regroup parameters that are shared across multiple peers" @@ -45,15 +48,18 @@ nodes: - name: name kind: Text unique: true + order_weight: 1 - name: description kind: Text optional: true - name: import_policies kind: Text optional: true + order_weight: 4 - name: export_policies kind: Text optional: true + order_weight: 5 relationships: - name: local_as identifier: bgppeergroup__local_as @@ -61,12 +67,14 @@ nodes: optional: true cardinality: one kind: Attribute + order_weight: 2 - name: remote_as identifier: 
bgppeergroup__remote_as peer: InfraAutonomousSystem optional: true cardinality: one kind: Attribute + order_weight: 3 - name: BGPSession namespace: Infra description: "A BGP Session represent a point to point connection between two routers" @@ -82,17 +90,21 @@ nodes: - name: type kind: Text enum: [EXTERNAL, INTERNAL] + order_weight: 1 - name: description kind: Text optional: true - name: import_policies kind: Text optional: true + order_weight: 8 - name: export_policies kind: Text optional: true + order_weight: 9 - name: status kind: Dropdown + order_weight: 6 choices: - name: active label: Active @@ -112,6 +124,7 @@ nodes: color: "#bfbfbf" - name: role kind: Dropdown + order_weight: 7 choices: - name: backbone label: Backbone @@ -132,24 +145,28 @@ nodes: optional: true cardinality: one kind: Attribute + order_weight: 2 - name: remote_as identifier: bgpsession__remote_as peer: InfraAutonomousSystem optional: false cardinality: one kind: Attribute + order_weight: 3 - name: local_ip identifier: bgpsession__local_ip peer: BuiltinIPAddress optional: true cardinality: one kind: Attribute + order_weight: 4 - name: remote_ip identifier: bgpsession__remote_ip peer: BuiltinIPAddress optional: false cardinality: one kind: Attribute + order_weight: 5 - name: device peer: InfraDevice optional: false diff --git a/models/base/service.yml b/models/base/service.yml index 698d981afc..ba31d0c35e 100644 --- a/models/base/service.yml +++ b/models/base/service.yml @@ -15,6 +15,7 @@ generics: kind: Text label: Name optional: false + order_weight: 1 nodes: - name: BackBoneService namespace: Infra @@ -30,24 +31,31 @@ nodes: kind: Text label: Circuit ID optional: false + order_weight: 3 - name: internal_circuit_id kind: Text label: Internal Circuit ID optional: false + order_weight: 2 relationships: - name: provider cardinality: one peer: OrganizationProvider optional: false + kind: Attribute - name: site_a label: Site A cardinality: one peer: LocationSite optional: false identifier: 
infrabackboneservice__location_site_a + kind: Attribute + order_weight: 4 - name: site_b label: Site B cardinality: one peer: LocationSite optional: false identifier: infrabackboneservice__location_site_b + kind: Attribute + order_weight: 5 diff --git a/models/base_menu.yml b/models/base_menu.yml index aaf79f3dc3..96da36d917 100644 --- a/models/base_menu.yml +++ b/models/base_menu.yml @@ -7,12 +7,19 @@ spec: name: MainMenu label: Organization icon: "mdi:domain" + kind: OrganizationGeneric children: data: + - namespace: Organization + name: Generic + label: All Organizations + kind: OrganizationGeneric + icon: "mdi:domain" + - namespace: Organization name: Manufacturer label: Manufacturer - kind: LocationManufacturer + kind: OrganizationManufacturer icon: "mdi:domain" - namespace: Organization @@ -28,12 +35,18 @@ spec: icon: "mdi:domain" - namespace: Location - name: Generic + name: Menu label: Location kind: LocationGeneric icon: "mingcute:location-line" children: data: + - namespace: Location + name: Generic + label: All Locations + kind: LocationGeneric + icon: "mingcute:location-line" + - namespace: Location name: Continent label: Continent @@ -65,35 +78,46 @@ spec: children: data: - namespace: Infra - name: Device - label: Device - kind: InfraDevice + name: NetworkDeviceMenu + label: Network Device icon: "mdi:server" children: data: + - namespace: Infra + name: Device + label: Device + kind: InfraDevice + icon: "mdi:server" - name: Interface namespace: Infra label: "Interface" icon: "mdi:ethernet" kind: InfraInterface + - namespace: Infra + name: MlagMenu + label: MLAG + icon: "eos-icons:cluster-management" + children: + data: + - name: MlagDomain + namespace: Infra + label: "MLAG Domain" + icon: "eos-icons:cluster-management" + kind: InfraMlagDomain + + - name: MlagInterface + namespace: Infra + label: "MLAG Interface" + icon: "mdi:ethernet" + kind: InfraMlagInterface + - namespace: Infra name: Platform label: Platform kind: InfraPlatform icon: 
"mdi:application-cog-outline" - - name: MlagDomain - namespace: Infra - label: "MLAG Domain" - icon: "eos-icons:cluster-management" - kind: InfraMlagDomain - - - name: MlagInterface - namespace: Infra - label: "MLAG Interface" - icon: "mdi:ethernet" - kind: InfraMlagInterface - namespace: Infra name: CircuitMenu diff --git a/models/infrastructure_edge.py b/models/infrastructure_edge.py index d6949d8709..66a397e3c3 100644 --- a/models/infrastructure_edge.py +++ b/models/infrastructure_edge.py @@ -9,11 +9,15 @@ from infrahub_sdk import InfrahubClient from infrahub_sdk.batch import InfrahubBatch +from infrahub_sdk.exceptions import NodeNotFoundError from infrahub_sdk.protocols import ( CoreAccount, CoreAccountGroup, + CoreAccountRole, + CoreGlobalPermission, CoreIPAddressPool, CoreIPPrefixPool, + CoreObjectPermission, CoreStandardGroup, IpamNamespace, ) @@ -136,13 +140,38 @@ def translate_str_to_bool(key: str, value: str) -> bool: # pylint: skip-file +class AccountRole(BaseModel): + name: str + global_permissions: list[str] | str | None = None + object_permissions: list[str] | str | None = None + + +class AccountGroup(BaseModel): + name: str + roles: list[str] = Field(default_factory=list) + members: list[str] = Field(default_factory=list) + + class Account(BaseModel): name: str + label: str password: str account_type: str role: str +class GlobalPermission(BaseModel): + action: str + decision: int + + +class ObjectPermission(BaseModel): + namespace: str + name: str + action: str + decision: int + + class Asn(BaseModel): asn: int organization: str @@ -621,17 +650,64 @@ def site_generator(nbr_site: int = 2) -> list[Site]: INTERFACE_OBJS: dict[str, list[InfraInterfaceL3]] = defaultdict(list) +GLOBAL_PERMISSIONS = ( + GlobalPermission(action="edit_default_branch", decision=6), + GlobalPermission(action="merge_branch", decision=6), + GlobalPermission(action="merge_proposed_change", decision=6), + GlobalPermission(action="manage_schema", decision=6), + 
GlobalPermission(action="manage_accounts", decision=6), + GlobalPermission(action="manage_permissions", decision=6), + GlobalPermission(action="manage_repositories", decision=6), +) + +OBJECT_PERMISSIONS = { + "deny_any": ObjectPermission(namespace="*", name="*", action="any", decision=1), + "allow_any": ObjectPermission(namespace="*", name="*", action="any", decision=6), + "allow_branches": ObjectPermission(namespace="*", name="*", action="any", decision=4), + "view_any": ObjectPermission(namespace="*", name="*", action="view", decision=6), +} + +ACCOUNT_ROLES = ( + AccountRole(name="Administrator", global_permissions="__all__", object_permissions=["allow_any"]), + AccountRole(name="Global read-only", object_permissions=["deny_any", "view_any"]), + AccountRole( + name="Global read-write", + global_permissions=["edit_default_branch", "merge_branch", "merge_proposed_change"], + object_permissions=["allow_any"], + ), + AccountRole(name="Own branches read-write", object_permissions=["allow_branches"]), +) + ACCOUNTS = ( - Account(name="pop-builder", account_type="Script", password="Password123", role="read-write"), - Account(name="CRM Synchronization", account_type="Script", password="Password123", role="read-write"), - Account(name="Jack Bauer", account_type="User", password="Password123", role="read-only"), - Account(name="Chloe O'Brian", account_type="User", password="Password123", role="read-write"), - Account(name="David Palmer", account_type="User", password="Password123", role="read-write"), - Account(name="Operation Team", account_type="User", password="Password123", role="read-only"), - Account(name="Engineering Team", account_type="User", password="Password123", role="read-write"), - Account(name="Architecture Team", account_type="User", password="Password123", role="read-only"), + Account(name="pop-builder", label="pop-builder", account_type="Script", password="Password123", role="read-write"), + Account( + name="crm-sync", label="CRM Synchronization", 
account_type="Script", password="Password123", role="read-write" + ), + Account(name="jbauer", label="Jack Bauer", account_type="User", password="Password123", role="read-only"), + Account(name="cobrian", label="Chloe O'Brian", account_type="User", password="Password123", role="read-write"), + Account(name="dpalmer", label="David Palmer", account_type="User", password="Password123", role="read-write"), + Account(name="sudo", label="Sue Dough", password="Password123", role="admin", account_type="User"), + Account(name="elawson", label="Emily Lawson", password="Password123", role="read-write", account_type="User"), + Account(name="jthompson", label="Jacob Thompson", password="Password123", role="read-write", account_type="User"), + Account(name="shernandez", label="Sofia Hernandez", password="Password123", role="read-write", account_type="User"), + Account(name="rpatel", label="Ryan Patel", password="Password123", role="read-only", account_type="User"), + Account(name="ocarter", label="Olivia Carter", password="Password123", role="read-only", account_type="User"), ) +ACCOUNT_GROUPS = { + "administrators": AccountGroup( + name="Administrators", roles=["Administrator"], members=["sudo", "pop-builder", "crm-sync"] + ), + "ops-team": AccountGroup( + name="Operations Team", roles=["Global read-only"], members=["jbauer", "elawson", "jthompson"] + ), + "eng-team": AccountGroup( + name="Engineering Team", roles=["Global read-write"], members=["cobrian", "shernandez", "rpatel"] + ), + "arch-team": AccountGroup( + name="Architecture Team", roles=["Own branches read-write"], members=["dpalmer", "ocarter"] + ), +} GROUPS = ( Group(name="edge_router", label="Edge Router"), @@ -907,8 +983,8 @@ async def generate_site_vlans( client: InfrahubClient, log: logging.Logger, branch: str, site: Site, site_id: int ) -> None: account_pop = store.get("pop-builder", kind=CoreAccount, raise_when_missing=True) - group_eng = store.get("Engineering Team", kind=CoreAccount, 
raise_when_missing=True) - group_ops = store.get("Operation Team", kind=CoreAccount, raise_when_missing=True) + group_eng = store.get("eng-team", kind=CoreAccountGroup, raise_when_missing=True) + group_ops = store.get("ops-team", kind=CoreAccountGroup, raise_when_missing=True) for vlan in VLANS: vlan_name = f"{site.name}_{vlan.role}" @@ -986,10 +1062,10 @@ async def generate_site( external_pool: CoreNode, site_design: SiteDesign, ) -> str: - group_eng = store.get("Engineering Team", kind=CoreAccount) - group_ops = store.get("Operation Team", kind=CoreAccount) + group_eng = store.get("eng-team", kind=CoreAccountGroup) + group_ops = store.get("ops-team", kind=CoreAccountGroup) account_pop = store.get("pop-builder", kind=CoreAccount) - account_crm = store.get("CRM Synchronization", kind=CoreAccount) + account_crm = store.get("crm-sync", kind=CoreAccount) internal_as = store.get(kind=InfraAutonomousSystem, key="Duff") country = store.get(kind=LocationCountry, key=site.country) @@ -1757,22 +1833,122 @@ async def generate_continents_countries(client: InfrahubClient, log: logging.Log log.info("Created continents and countries") -async def prepare_accounts(client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch) -> None: - groups = await client.filters(branch=branch, kind=CoreAccountGroup, name__value="Super Administrators") - store.set(key=groups[0].name, node=groups[0]) +async def prepare_permissions(client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch) -> None: + for p in GLOBAL_PERMISSIONS: + obj = await client.get( + branch=branch, kind="CoreGlobalPermission", hfid=[p.action, str(p.decision)], raise_when_missing=True + ) + store.set(key=p.action, node=obj) + + for name, p in OBJECT_PERMISSIONS.items(): + try: + obj = await client.get( + branch=branch, kind="CoreObjectPermission", hfid=[p.namespace, p.name, p.action, str(p.decision)] + ) + except NodeNotFoundError: + obj = await client.create(branch=branch, 
kind="CoreObjectPermission", data=p.model_dump()) + batch.add(task=obj.save, node=obj) + store.set(key=name, node=obj) - for account in ACCOUNTS: - data = account.model_dump() - data["member_of_groups"] = groups - obj = await client.create(branch=branch, kind="CoreAccount", data=data) +async def prepare_account_roles(client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch) -> None: + for role in ACCOUNT_ROLES: + obj = await client.create( + branch=branch, + kind="CoreAccountRole", + data=role.model_dump(exclude={"global_permissions", "object_permissions"}), + ) + batch.add(task=obj.save, node=obj) + store.set(key=role.name, node=obj) + + +async def prepare_accounts(client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch) -> None: + for account in ACCOUNTS: + obj = await client.create(branch=branch, kind="CoreAccount", data=account.model_dump(exclude={"groups"})) batch.add(task=obj.save, node=obj) store.set(key=account.name, node=obj) + for name, group in ACCOUNT_GROUPS.items(): + obj = await client.create( + branch=branch, kind="CoreAccountGroup", data=group.model_dump(exclude={"roles", "members"}) + ) + batch.add(task=obj.save, node=obj) + store.set(key=name, node=obj) + + +async def map_permissions_to_roles( + client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch +) -> None: + for role in ACCOUNT_ROLES: + if not role.global_permissions and not role.object_permissions: + continue + + obj = store.get(role.name, kind=CoreAccountRole, raise_when_missing=True) + await obj.permissions.fetch() + + permissions: list[CoreGlobalPermission | CoreObjectPermission] = [] + if role.global_permissions: + if isinstance(role.global_permissions, str) and role.global_permissions == "__all__": + permissions.extend( + [ + store.get(p.action, kind=CoreGlobalPermission, raise_when_missing=True) + for p in GLOBAL_PERMISSIONS + ] + ) + else: + permissions.extend( + [ + store.get(p_name, kind=CoreGlobalPermission, 
raise_when_missing=True) + for p_name in role.global_permissions + ] + ) + if role.object_permissions: + if isinstance(role.object_permissions, str) and role.object_permissions == "__all__": + permissions.extend( + [ + store.get(p_name, kind=CoreObjectPermission, raise_when_missing=True) + for p_name in GLOBAL_PERMISSIONS + ] + ) + else: + permissions.extend( + [ + store.get(p_name, kind=CoreObjectPermission, raise_when_missing=True) + for p_name in role.object_permissions + ] + ) + + obj.permissions.extend(permissions) + batch.add(task=obj.save, node=obj) + + +async def map_user_and_roles_to_groups( + client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch +) -> None: + for group_name, group in ACCOUNT_GROUPS.items(): + updated = False + obj = store.get(group_name, kind=CoreAccountGroup, raise_when_missing=True) + + if group.roles: + await obj.roles.fetch() + obj.roles.extend( + data=[store.get(role, kind=CoreAccountRole, raise_when_missing=True) for role in group.roles] + ) + updated = True + if group.members: + await obj.members.fetch() + obj.members.extend( + data=[store.get(member, kind=CoreAccount, raise_when_missing=True) for member in group.members] + ) + updated = True + + if updated: + batch.add(task=obj.save, node=obj) + async def prepare_asns(client: InfrahubClient, log: logging.Logger, branch: str, batch: InfrahubBatch) -> None: - account_chloe = store.get("Chloe O'Brian", kind=CoreAccount, raise_when_missing=True) - account_crm = store.get("CRM Synchronization", kind=CoreAccount, raise_when_missing=True) + account_chloe = store.get("cobrian", kind=CoreAccount, raise_when_missing=True) + account_crm = store.get("crm-sync", kind=CoreAccount, raise_when_missing=True) organizations_dict = {org.name: org.type for org in ORGANIZATIONS} for asn in ASNS: organization_type = organizations_dict.get(asn.organization, None) @@ -1946,10 +2122,32 @@ async def run( # ------------------------------------------ # Create User Accounts, Groups, 
Organizations & Platforms # ------------------------------------------ - log.info("Creating User Accounts, Groups & Organizations & Platforms") + log.info("Creating User Accounts, Groups, Roles, Permissions & Organizations & Platforms") + + batch = await client.create_batch() + await prepare_permissions(client=client, log=log, branch=branch, batch=batch) + await prepare_account_roles(client=client, log=log, branch=branch, batch=batch) + async for node, _ in batch.execute(): + if hasattr(node, "name"): + log.info(f"- Created {node._schema.kind} - {node.name.value}") + else: + log.info(f"- Created {node._schema.kind} - {node}") batch = await client.create_batch() await prepare_accounts(client=client, log=log, branch=branch, batch=batch) + async for node, _ in batch.execute(): + log.info(f"- Created {node._schema.kind} - {node.name.value}") + + batch = await client.create_batch() + await map_permissions_to_roles(client=client, log=log, branch=branch, batch=batch) + async for node, _ in batch.execute(): + log.info(f"- Updated {node._schema.kind} - {node.name.value} with permissions") + + batch = await client.create_batch() + await map_user_and_roles_to_groups(client=client, log=log, branch=branch, batch=batch) + async for node, _ in batch.execute(): + log.info(f"- Updated {node._schema.kind} - {node.name.value} with roles and members") + await prepare_groups(client=client, log=log, branch=branch, batch=batch) await prepare_platforms(client=client, log=log, branch=branch, batch=batch) await prepare_organizations(client=client, log=log, branch=branch, batch=batch) diff --git a/poetry.lock b/poetry.lock index 460dde50ab..edea06928b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4679,29 +4679,29 @@ files = [ [[package]] name = "ruff" -version = "0.6.6" +version = "0.7.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.6-py3-none-linux_armv6l.whl", hash = "sha256:f5bc5398457484fc0374425b43b030e4668ed4d2da8ee7fdda0e926c9f11ccfb"}, - {file = "ruff-0.6.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:515a698254c9c47bb84335281a170213b3ee5eb47feebe903e1be10087a167ce"}, - {file = "ruff-0.6.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6bb1b4995775f1837ab70f26698dd73852bbb82e8f70b175d2713c0354fe9182"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c546f412dfae8bb9cc4f27f0e45cdd554e42fecbb34f03312b93368e1cd0a6"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59627e97364329e4eae7d86fa7980c10e2b129e2293d25c478ebcb861b3e3fd6"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94c3f78c3d32190aafbb6bc5410c96cfed0a88aadb49c3f852bbc2aa9783a7d8"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:704da526c1e137f38c8a067a4a975fe6834b9f8ba7dbc5fd7503d58148851b8f"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:efeede5815a24104579a0f6320660536c5ffc1c91ae94f8c65659af915fb9de9"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e368aef0cc02ca3593eae2fb8186b81c9c2b3f39acaaa1108eb6b4d04617e61f"}, - {file = "ruff-0.6.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2653fc3b2a9315bd809725c88dd2446550099728d077a04191febb5ea79a4f79"}, - {file = "ruff-0.6.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:bb858cd9ce2d062503337c5b9784d7b583bcf9d1a43c4df6ccb5eab774fbafcb"}, - {file = "ruff-0.6.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:488f8e15c01ea9afb8c0ba35d55bd951f484d0c1b7c5fd746ce3c47ccdedce68"}, - {file = "ruff-0.6.6-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:aefb0bd15f1cfa4c9c227b6120573bb3d6c4ee3b29fb54a5ad58f03859bc43c6"}, - {file = "ruff-0.6.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a4c0698cc780bcb2c61496cbd56b6a3ac0ad858c966652f7dbf4ceb029252fbe"}, - {file = "ruff-0.6.6-py3-none-win32.whl", hash = "sha256:aadf81ddc8ab5b62da7aae78a91ec933cbae9f8f1663ec0325dae2c364e4ad84"}, - {file = "ruff-0.6.6-py3-none-win_amd64.whl", hash = "sha256:0adb801771bc1f1b8cf4e0a6fdc30776e7c1894810ff3b344e50da82ef50eeb1"}, - {file = "ruff-0.6.6-py3-none-win_arm64.whl", hash = "sha256:4b4d32c137bc781c298964dd4e52f07d6f7d57c03eae97a72d97856844aa510a"}, - {file = "ruff-0.6.6.tar.gz", hash = "sha256:0fc030b6fd14814d69ac0196396f6761921bd20831725c7361e1b8100b818034"}, + {file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"}, + {file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"}, + {file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"}, + {file = 
"ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"}, + {file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"}, + {file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"}, + {file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"}, + {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, ] [[package]] @@ -5789,4 +5789,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10, < 3.13" -content-hash = "5ed95f29951a02e0a58ebf5084a57ef69809a698eb8759d05f30921e3e3acdaa" +content-hash = "dc107e58cbb2aaba133ae8a77241e37bfa9852674c7544f107c2f5d2447620c5" diff --git a/pyproject.toml b/pyproject.toml index 72311d3be9..a28910aac4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "infrahub-server" -version = "1.0.0-dev0" +version = "1.0.0" description = "Infrahub is taking a new approach to Infrastructure Management by providing a new generation of 
datastore to organize and control all the data that defines how an infrastructure should run." authors = ["OpsMill "] readme = "README.md" @@ -90,7 +90,7 @@ pre-commit = "^2.20.0" types-toml = "*" types-ujson = "*" types-pyyaml = "*" -ruff = "0.6.6" +ruff = "0.7.1" invoke = "2.2.0" pytest-benchmark = "^4.0.0" pytest-codspeed = "^2.2.0" @@ -252,10 +252,6 @@ ignore_errors = true module = "infrahub.core.query" ignore_errors = true -[[tool.mypy.overrides]] -module = "infrahub.core.query.attribute" -ignore_errors = true - [[tool.mypy.overrides]] module = "infrahub.core.query.diff" ignore_errors = true @@ -292,7 +288,6 @@ ignore_errors = true module = "infrahub.core.schema.manager" ignore_errors = true - [[tool.mypy.overrides]] module = "infrahub.core.utils" ignore_errors = true @@ -341,10 +336,6 @@ ignore_errors = true module = "infrahub.graphql.resolver" ignore_errors = true -[[tool.mypy.overrides]] -module = "infrahub.graphql.subscription" -ignore_errors = true - [[tool.mypy.overrides]] module = "infrahub.graphql.types.standard_node" ignore_errors = true @@ -497,7 +488,6 @@ ignore = [ "C409", # Unnecessary `list` literal passed to `tuple()` (rewrite as a `tuple` literal) "C414", # Unnecessary `list` call within `sorted()` "C420", # Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead - "E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for isinstance checks "FURB113", # Use `networks.extend(...)` instead of repeatedly calling `networks.append()` "FURB116", # Replace `bin` call with f-string "FURB118", # Use `operator.itemgetter(1)` instead of defining a lambda @@ -545,7 +535,6 @@ ignore = [ "RUF006", # Store a reference to the return value of `asyncio.create_task` "RUF010", # Use explicit conversion flag "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` - "RUF013", # PEP 484 prohibits implicit `Optional` "RUF015", # Prefer `next(...)` over single element slice "RUF021", # Parenthesize `a and b` 
expressions when chaining `and` and `or` together, to make the precedence clear "RUF027", # Possible f-string without an `f` prefix @@ -566,10 +555,8 @@ ignore = [ "SIM117", # Use a single `with` statement with multiple contexts instead of nested `with` statements "SIM118", # Use `key in dict` instead of `key in dict.keys()` "SIM201", # Use `backup_path.suffix != ".backup"` instead of `not backup_path.suffix == ".backup"` - "SIM300", # Yoda condition detected "SIM401", # Use `property["items"].get("format", None)` instead of an `if` block "SIM910", # Use `data.get("identifier")` instead of `data.get("identifier", None)` - "UP007", # Use X | Y for type annotations "UP012", # Unnecessary call to encode as UTF-8 "UP018", # Unnecessary {literal_type} call (rewrite as a literal) "UP031", # Use format specifiers instead of percent format @@ -608,6 +595,7 @@ allow-dunder-method-names = [ "ANN003", # Missing type annotation for `**kwargs` "ANN204", # Missing return type annotation for special method "ANN401", # Dynamically typed expressions (typing.Any) are disallowed + "UP007", # Use X | Y for type annotations ] "backend/infrahub/config.py" = [ @@ -621,13 +609,6 @@ allow-dunder-method-names = [ "ANN206", # Missing return type annotation for classmethod ] -"backend/infrahub/graphql/schema.py" = [ - ################################################################################################## - # Review and change the below later # - ################################################################################################## - "ANN201", # Missing return type annotation for public function -] - "backend/tests/**.py" = [ "S101", # Use of assert detected "S105", # Possible hardcoded password assigned to variable @@ -651,8 +632,9 @@ allow-dunder-method-names = [ # like this so that we can reactivate them one by one. Alternatively ignored after further # # investigation if they are deemed to not make sense. 
# ################################################################################################## - "C901", # `generate_site` is too complex (34 > 33)" - "E501", # Line too long + "C901", # `generate_site` is too complex (34 > 33)" + "E501", # Line too long + "RUF013", # PEP 484 prohibits implicit `Optional` ] "utilities/**.py" = [ diff --git a/python_sdk b/python_sdk index ed48858f4c..5bbb22d8a9 160000 --- a/python_sdk +++ b/python_sdk @@ -1 +1 @@ -Subproject commit ed48858f4cf579f85cdd2679f48025a907f6b800 +Subproject commit 5bbb22d8a914611d32e51ea0e93ea8db5b4e9776 diff --git a/tasks/container_ops.py b/tasks/container_ops.py index 5f46412ba0..024e4158c3 100644 --- a/tasks/container_ops.py +++ b/tasks/container_ops.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from .shared import ( AVAILABLE_SERVICES, @@ -26,7 +26,7 @@ def build_images( nocache: bool, database: str, namespace: Namespace, - service: Optional[str] = None, + service: str | None = None, ) -> None: if service and service not in AVAILABLE_SERVICES: sys.exit(f"{service} is not a valid service ({AVAILABLE_SERVICES})") diff --git a/tasks/demo.py b/tasks/demo.py index efb5cdd800..6dfa94b464 100644 --- a/tasks/demo.py +++ b/tasks/demo.py @@ -16,7 +16,7 @@ stop_services, update_core_schema, ) -from .infra_ops import load_infrastructure_data, load_infrastructure_schema +from .infra_ops import load_infrastructure_data, load_infrastructure_menu, load_infrastructure_schema from .shared import ( BUILD_NAME, INFRAHUB_DATABASE, @@ -73,7 +73,7 @@ def start(context: Context, database: str = INFRAHUB_DATABASE, wait: bool = Fals @task(optional=["database"]) def restart(context: Context, database: str = INFRAHUB_DATABASE) -> None: - """Restart Infrahub API Server and Git Agent within docker compose.""" + """Restart Infrahub API Server and Task worker within docker compose.""" restart_services(context=context, 
database=database, namespace=NAMESPACE) @@ -128,9 +128,16 @@ def status(context: Context, database: str = INFRAHUB_DATABASE) -> None: def load_infra_schema(context: Context, database: str = INFRAHUB_DATABASE) -> None: """Load the base schema for infrastructure.""" load_infrastructure_schema(context=context, database=database, namespace=NAMESPACE, add_wait=False) + load_infrastructure_menu(context=context, database=database, namespace=NAMESPACE) restart_services(context=context, database=database, namespace=NAMESPACE) +@task(optional=["database"]) +def load_infra_menu(context: Context, database: str = INFRAHUB_DATABASE) -> None: + """Load the menu for infrastructure.""" + load_infrastructure_menu(context=context, database=database, namespace=NAMESPACE) + + @task(optional=["database"]) def load_infra_data(context: Context, database: str = INFRAHUB_DATABASE) -> None: """Load infrastructure demo data.""" diff --git a/tasks/dev.py b/tasks/dev.py index 71fe3ad74d..dd306cab7c 100644 --- a/tasks/dev.py +++ b/tasks/dev.py @@ -3,7 +3,7 @@ import re from enum import Enum from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from invoke.tasks import task @@ -18,7 +18,7 @@ stop_services, update_core_schema, ) -from .infra_ops import load_infrastructure_data, load_infrastructure_schema +from .infra_ops import load_infrastructure_data, load_infrastructure_menu, load_infrastructure_schema from .shared import ( BUILD_NAME, INFRAHUB_DATABASE, @@ -43,7 +43,7 @@ @task(optional=["database"]) def build( context: Context, - service: Optional[str] = None, + service: str | None = None, python_ver: str = PYTHON_VER, nocache: bool = False, database: str = INFRAHUB_DATABASE, @@ -142,6 +142,7 @@ def load_infra_data(context: Context, database: str = INFRAHUB_DATABASE) -> None def load_infra_schema(context: Context, database: str = INFRAHUB_DATABASE) -> None: """Load the base schema for infrastructure."""
load_infrastructure_schema(context=context, database=database, namespace=NAMESPACE, add_wait=False) + load_infrastructure_menu(context=context, database=database, namespace=NAMESPACE) restart_services(context=context, database=database, namespace=NAMESPACE) @@ -153,7 +154,7 @@ def pull(context: Context, database: str = INFRAHUB_DATABASE) -> None: @task(optional=["database"]) def restart(context: Context, database: str = INFRAHUB_DATABASE) -> None: - """Restart Infrahub API Server and Git Agent within docker compose.""" + """Restart Infrahub API Server and Task worker within docker compose.""" restart_services(context=context, database=database, namespace=NAMESPACE) @@ -190,7 +191,7 @@ def get_version_from_pyproject() -> str: @task -def update_helm_chart(context: Context, chart_file: Optional[str] = "helm/Chart.yaml") -> None: +def update_helm_chart(context: Context, chart_file: str | None = "helm/Chart.yaml") -> None: """Update helm/Chart.yaml with the current version from pyproject.toml.""" version = get_version_from_pyproject() version_pattern = r"^appVersion:\s*[\d\.\-a-zA-Z]+" @@ -208,7 +209,7 @@ def replace_version(match: str) -> str: @task -def update_docker_compose(context: Context, docker_file: Optional[str] = "docker-compose.yml") -> None: +def update_docker_compose(context: Context, docker_file: str | None = "docker-compose.yml") -> None: """Update docker-compose.yml with the current version from pyproject.toml.""" version = get_version_from_pyproject() version_pattern = r"registry.opsmill.io/opsmill/infrahub:\$\{VERSION:-[\d\.\-a-zA-Z]+\}" @@ -226,7 +227,17 @@ def replace_version(match: str) -> str: def get_enum_mappings() -> dict: """Extracts enum mappings dynamically.""" - from infrahub.config import BrokerDriver, CacheDriver, StorageDriver, TraceExporterType, TraceTransportProtocol + from infrahub.config import ( + BrokerDriver, + CacheDriver, + Oauth2Provider, + OIDCProvider, + SSOProtocol, + StorageDriver, + TraceExporterType, + 
TraceTransportProtocol, + WorkflowDriver, + ) from infrahub.database.constants import DatabaseType enum_mappings = {} @@ -234,10 +245,14 @@ def get_enum_mappings() -> dict: for enum_class in [ BrokerDriver, CacheDriver, - DatabaseType, + Oauth2Provider, + OIDCProvider, + SSOProtocol, StorageDriver, TraceExporterType, TraceTransportProtocol, + WorkflowDriver, + DatabaseType, ]: for item in enum_class: enum_mappings[item] = item.value @@ -249,9 +264,11 @@ def update_docker_compose_env_vars( env_vars: list[str], env_defaults: dict[str, Any], enum_mappings: dict[Any, str], - docker_file: Optional[str] = "docker-compose.yml", + docker_file: str | None = "docker-compose.yml", ) -> None: """Update the docker-compose.yml file with the environment variables.""" + import json + docker_path = Path(docker_file) docker_compose = docker_path.read_text(encoding="utf-8").splitlines() @@ -279,12 +296,17 @@ def update_docker_compose_env_vars( new_config_lines = [] for var in all_vars: + if var.startswith("INFRAHUB_DEV"): + continue default_value = env_defaults.get(var, "") if isinstance(default_value, bool): - default_value = str(default_value).lower() + default_value_str = str(default_value).lower() elif isinstance(default_value, Enum): - default_value = enum_mappings.get(default_value, str(default_value)) - default_value_str = str(default_value) if default_value is not None else "" + default_value_str = enum_mappings.get(default_value, str(default_value)) + elif isinstance(default_value, list): + default_value_str = json.dumps(default_value) + else: + default_value_str = str(default_value) if default_value is not None else "" if var in existing_vars: line_idx = existing_vars[var] @@ -293,14 +315,24 @@ def update_docker_compose_env_vars( match = pattern.match(existing_value) if match and match.group(1) == var and match.group(2) == default_value_str: new_config_lines.append(docker_compose[line_idx]) - elif var in ["INFRAHUB_BROKER_USERNAME", "INFRAHUB_BROKER_PASSWORD"]: + elif var 
in [ + "INFRAHUB_BROKER_USERNAME", + "INFRAHUB_BROKER_PASSWORD", + "INFRAHUB_CACHE_USERNAME", + "INFRAHUB_CACHE_PASSWORD", + ]: key_name = var.replace("INFRAHUB_", "").lower() new_config_lines.append(f" {var}: &{key_name} ${{{var}:-{default_value_str}}}") elif default_value_str: new_config_lines.append(f" {var}: ${{{var}:-{default_value_str}}}") else: new_config_lines.append(f" {var}:") - elif var in ["INFRAHUB_BROKER_USERNAME", "INFRAHUB_BROKER_PASSWORD"]: + elif var in [ + "INFRAHUB_BROKER_USERNAME", + "INFRAHUB_BROKER_PASSWORD", + "INFRAHUB_CACHE_USERNAME", + "INFRAHUB_CACHE_PASSWORD", + ]: key_name = var.replace("INFRAHUB_", "").lower() new_config_lines.append(f" {var}: &{key_name} ${{{var}:-{default_value_str}}}") elif default_value_str: @@ -316,7 +348,7 @@ def update_docker_compose_env_vars( @task def gen_config_env( - context: Context, docker_file: Optional[str] = "docker-compose.yml", update_docker_file: Optional[bool] = False + context: Context, docker_file: str | None = "docker-compose.yml", update_docker_file: bool | None = False ) -> None: """Generate list of env vars required for configuration and update docker file.yml if need be.""" from pydantic_settings import BaseSettings diff --git a/tasks/infra_ops.py b/tasks/infra_ops.py index 6f656dd58e..9122804351 100644 --- a/tasks/infra_ops.py +++ b/tasks/infra_ops.py @@ -38,7 +38,13 @@ def load_infrastructure_schema( command_schema += " --wait 30" execute_command(context=context, command=command_schema) - menu_target = "models/base_menu.yml" - if namespace == Namespace.DEV: - command_menu = f"{base_cmd} run {SERVICE_WORKER_NAME} infrahubctl menu load {menu_target}" - execute_command(context=context, command=command_menu) + +def load_infrastructure_menu( + context: Context, database: str, namespace: Namespace, menu_target: str = "models/base_menu.yml" +) -> None: + with context.cd(ESCAPED_REPO_PATH): + compose_files_cmd = build_compose_files_cmd(database=database, namespace=namespace) + compose_cmd = 
get_compose_cmd(namespace=namespace) + base_cmd = f"{get_env_vars(context, namespace=namespace)} {compose_cmd} {compose_files_cmd} -p {BUILD_NAME}" + command = f"{base_cmd} run {SERVICE_WORKER_NAME} infrahubctl menu load {menu_target}" + execute_command(context=context, command=command) diff --git a/tasks/shared.py b/tasks/shared.py index 98c4fce1cd..5359293e77 100644 --- a/tasks/shared.py +++ b/tasks/shared.py @@ -242,9 +242,9 @@ def get_env_vars(context: Context, namespace: Namespace = Namespace.DEFAULT) -> if DATABASE_DOCKER_IMAGE: ENV_VARS_DICT["DATABASE_DOCKER_IMAGE"] = DATABASE_DOCKER_IMAGE - elif INFRAHUB_DATABASE == DatabaseType.NEO4J.value: + elif DatabaseType.NEO4J.value == INFRAHUB_DATABASE: ENV_VARS_DICT["DATABASE_DOCKER_IMAGE"] = NEO4J_DOCKER_IMAGE - elif INFRAHUB_DATABASE == DatabaseType.MEMGRAPH.value: + elif DatabaseType.MEMGRAPH.value == INFRAHUB_DATABASE: ENV_VARS_DICT["DATABASE_DOCKER_IMAGE"] = MEMGRAPH_DOCKER_IMAGE return " ".join([f"{key}={value}" for key, value in ENV_VARS_DICT.items()])