diff --git a/docs/docs/python-sdk/reference/compatibility.mdx b/docs/docs/python-sdk/reference/compatibility.mdx index 0d3842f7..8763c4e1 100644 --- a/docs/docs/python-sdk/reference/compatibility.mdx +++ b/docs/docs/python-sdk/reference/compatibility.mdx @@ -12,6 +12,7 @@ Each Infrahub release pins a specific SDK version. Using the matching SDK versio | Infrahub | Required SDK | Release date | | --- | --- | --- | +| 1.9.x | >= 1.20.0 | April 2026 | | 1.8.x | >= 1.19.0 | March 2026 | | 1.7.x | >= 1.18.1 | January 2026 | | 1.6.x | >= 1.16.0 | December 2025 | @@ -31,6 +32,12 @@ The table below shows the exact SDK version pinned to each Infrahub release. | Infrahub | SDK version | Infrahub release date | | --- | --- | --- | +| 1.9.3 | 1.20.0 | 2026-05-05 | +| 1.9.2 | 1.20.0 | 2026-04-30 | +| 1.9.1 | 1.20.0 | 2026-04-29 | +| 1.9.0 | 1.20.0 | 2026-04-24 | +| 1.8.6 | 1.19.0 | 2026-04-21 | +| 1.8.5 | 1.19.0 | 2026-04-17 | | 1.8.4 | 1.19.0 | 2026-04-02 | | 1.8.3 | 1.19.0 | 2026-03-31 | | 1.8.2 | 1.19.0 | 2026-03-25 | diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx index 54e9d838..639b3611 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx @@ -320,8 +320,9 @@ Allocate a new IP address by using the provided resource pool. - `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. - `tracker`: The offset for pagination. -Returns: - InfrahubNode: Node corresponding to the allocated resource. +**Returns:** + +- Node corresponding to the allocated resource. #### `allocate_next_ip_prefix` @@ -359,8 +360,9 @@ Allocate a new IP prefix by using the provided resource pool. - `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. - `tracker`: The offset for pagination. -Returns: - InfrahubNode: Node corresponding to the allocated resource. 
+**Returns:** + +- Node corresponding to the allocated resource. #### `create_batch` @@ -708,8 +710,9 @@ Allocate a new IP address by using the provided resource pool. - `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. - `tracker`: The offset for pagination. -Returns: - InfrahubNodeSync: Node corresponding to the allocated resource. +**Returns:** + +- Node corresponding to the allocated resource. #### `allocate_next_ip_prefix` @@ -747,8 +750,9 @@ Allocate a new IP prefix by using the provided resource pool. - `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. - `tracker`: The offset for pagination. -Returns: - InfrahubNodeSync: Node corresponding to the allocated resource. +**Returns:** + +- Node corresponding to the allocated resource. #### `repository_update_commit` diff --git a/docs/docs_generation/compatibility.py b/docs/docs_generation/compatibility.py index 2a17ff55..0f9c6f25 100644 --- a/docs/docs_generation/compatibility.py +++ b/docs/docs_generation/compatibility.py @@ -17,6 +17,7 @@ class ReleaseMapping: infrahub: Infrahub server version (e.g. "1.8.4"). sdk: SDK version pinned to this release (e.g. "1.19.0"). date: Infrahub release date in YYYY-MM-DD format. + """ infrahub: str @@ -32,6 +33,7 @@ class VersionRange: infrahub: Infrahub minor version pattern (e.g. "1.8.x"). min_sdk: Minimum required SDK version (e.g. "1.19.0"). date: Approximate release month (e.g. "March 2026"). + """ infrahub: str @@ -46,6 +48,7 @@ class PythonSupport: Args: sdk_range: SDK version range description (e.g. ">= 1.17.0"). python_versions: Comma-separated Python versions (e.g. "3.10, 3.11, 3.12, 3.13, 3.14"). + """ sdk_range: str @@ -60,6 +63,7 @@ class FeatureRequirement: feature: Feature name or description. min_sdk: Minimum SDK version required. min_infrahub: Minimum Infrahub version required. 
+ """ feature: str @@ -70,6 +74,7 @@ class FeatureRequirement: # Mapping of Infrahub minor version series to minimum SDK versions. # Auto-updated by update_compatibility.py. VERSION_RANGES: list[VersionRange] = [ + VersionRange(infrahub="1.9.x", min_sdk="1.20.0", date="April 2026"), VersionRange(infrahub="1.8.x", min_sdk="1.19.0", date="March 2026"), VersionRange(infrahub="1.7.x", min_sdk="1.18.1", date="January 2026"), VersionRange(infrahub="1.6.x", min_sdk="1.16.0", date="December 2025"), @@ -85,6 +90,12 @@ class FeatureRequirement: # Detailed mapping of every Infrahub release to its pinned SDK version. # Auto-updated by update_compatibility.py. RELEASE_MAPPINGS: list[ReleaseMapping] = [ + ReleaseMapping(infrahub="1.9.3", sdk="1.20.0", date="2026-05-05"), + ReleaseMapping(infrahub="1.9.2", sdk="1.20.0", date="2026-04-30"), + ReleaseMapping(infrahub="1.9.1", sdk="1.20.0", date="2026-04-29"), + ReleaseMapping(infrahub="1.9.0", sdk="1.20.0", date="2026-04-24"), + ReleaseMapping(infrahub="1.8.6", sdk="1.19.0", date="2026-04-21"), + ReleaseMapping(infrahub="1.8.5", sdk="1.19.0", date="2026-04-17"), ReleaseMapping(infrahub="1.8.4", sdk="1.19.0", date="2026-04-02"), ReleaseMapping(infrahub="1.8.3", sdk="1.19.0", date="2026-03-31"), ReleaseMapping(infrahub="1.8.2", sdk="1.19.0", date="2026-03-25"), diff --git a/docs/docs_generation/content_gen_methods/command_output_method.py b/docs/docs_generation/content_gen_methods/command_output_method.py index bbb32f04..412ae43d 100644 --- a/docs/docs_generation/content_gen_methods/command_output_method.py +++ b/docs/docs_generation/content_gen_methods/command_output_method.py @@ -30,6 +30,7 @@ class CommandOutputDocContentGenMethod(ADocContentGenMethod): command=TyperCommand(module="infrahub_sdk.ctl.cli_commands", name="dump", app_name="infrahubctl", is_function=True), ) content = method.apply() + """ def __init__(self, context: Context, working_directory: Path, command: ACommand) -> None: diff --git 
a/docs/docs_generation/content_gen_methods/file_printing_method.py b/docs/docs_generation/content_gen_methods/file_printing_method.py index 1b07aa20..593163d5 100644 --- a/docs/docs_generation/content_gen_methods/file_printing_method.py +++ b/docs/docs_generation/content_gen_methods/file_printing_method.py @@ -13,6 +13,7 @@ class FilePrintingDocContentGenMethod(ADocContentGenMethod): Args: file: The ``MdxFile`` whose content will be returned. + """ def __init__(self, file: MdxFile) -> None: diff --git a/docs/docs_generation/content_gen_methods/jinja2_method.py b/docs/docs_generation/content_gen_methods/jinja2_method.py index c91c0d7c..0fb1ea64 100644 --- a/docs/docs_generation/content_gen_methods/jinja2_method.py +++ b/docs/docs_generation/content_gen_methods/jinja2_method.py @@ -29,6 +29,7 @@ class Jinja2DocContentGenMethod(ADocContentGenMethod): template_variables={"builtin": BUILTIN_FILTERS}, ) content = method.apply() + """ def __init__(self, template: Jinja2Template, template_variables: dict[str, Any]) -> None: diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py b/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py index 1fb3e8c9..6f36914a 100644 --- a/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py +++ b/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py @@ -92,6 +92,7 @@ class MdxCodeDocumentation(ACodeDocumentation): doc = MdxCodeDocumentation() files = doc.generate(context=ctx, modules_to_document=["infrahub_sdk.node"]) + """ def __init__( diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py b/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py index 20b55513..eeb56477 100644 --- a/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py +++ b/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py @@ -23,6 +23,7 @@ class PagePriority: classes: Ordered list of class/function names to appear first on the page. 
methods: Per-class ordered list of method names to appear first. Key is class name, value is ordered method name list. + """ sections: list[str] = field(default_factory=list) @@ -46,6 +47,7 @@ class SectionPriority: names: Ordered list of child section names to appear first. sub_priorities: Per-child priorities for deeper nesting. Key is child name, value is ordered subsection name list. + """ names: list[str] = field(default_factory=list) diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_section.py b/docs/docs_generation/content_gen_methods/mdx/mdx_section.py index ed4e25fa..b4f92a62 100644 --- a/docs/docs_generation/content_gen_methods/mdx/mdx_section.py +++ b/docs/docs_generation/content_gen_methods/mdx/mdx_section.py @@ -30,6 +30,7 @@ class MdxSection(ASection): name: Item name extracted from the heading (e.g. class or method name). heading_level: Markdown heading level (2, 3, or 4). _lines: All lines belonging to this section, including the heading. + """ name: str diff --git a/docs/docs_generation/helpers.py b/docs/docs_generation/helpers.py index 15ffff6b..0f1fe0a0 100644 --- a/docs/docs_generation/helpers.py +++ b/docs/docs_generation/helpers.py @@ -13,6 +13,7 @@ def get_env_vars() -> dict[str, list[str]]: Returns: Mapping of field name to list of upper-cased environment variable names. + """ env_vars: dict[str, list[str]] = defaultdict(list) settings = ConfigBase() @@ -47,6 +48,7 @@ def build_config_properties() -> list[dict[str, Any]]: Returns: List of dicts with keys: ``name``, ``description``, ``type``, ``choices``, ``default``, ``env_vars``. 
+ """ schema = ConfigBase.model_json_schema() env_vars = get_env_vars() diff --git a/docs/docs_generation/pages/base.py b/docs/docs_generation/pages/base.py index a6b99831..66c33017 100644 --- a/docs/docs_generation/pages/base.py +++ b/docs/docs_generation/pages/base.py @@ -18,6 +18,7 @@ class DocPage: page = DocPage(content_gen_method=Jinja2DocContentGenMethod(...)) print(page.content()) + """ def __init__(self, content_gen_method: ADocContentGenMethod) -> None: @@ -38,6 +39,7 @@ class MDXDocPage: mdx = MDXDocPage(page=my_page, output_path=Path("docs/ref/client.mdx")) mdx.to_mdx() + """ _CONTROL_CHAR_RE = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]") diff --git a/docs/docs_generation/update_compatibility.py b/docs/docs_generation/update_compatibility.py index d2b7e0d7..fb8f7113 100755 --- a/docs/docs_generation/update_compatibility.py +++ b/docs/docs_generation/update_compatibility.py @@ -52,6 +52,7 @@ def _paginated_get(client: httpx.Client, url: str) -> list[dict]: Returns: Combined list of JSON objects from all pages. + """ results: list[dict] = [] while url: @@ -72,6 +73,7 @@ def _fetch_infrahub_tags(client: httpx.Client) -> dict[str, str]: Returns: Dict mapping version strings (e.g. "1.8.4") to commit SHAs. + """ tags = _paginated_get(client, f"{API_BASE}/repos/{INFRAHUB_REPO}/tags?per_page=100") result = {} @@ -90,6 +92,7 @@ def _fetch_sdk_tags(client: httpx.Client) -> dict[str, str]: Returns: Dict mapping commit SHAs to version strings (e.g. "1.19.0"). + """ tags = _paginated_get(client, f"{API_BASE}/repos/{SDK_REPO}/tags?per_page=100") result = {} @@ -109,6 +112,7 @@ def _get_submodule_sha(client: httpx.Client, commit_sha: str) -> str | None: Returns: The submodule commit SHA, or None if not found. + """ resp = client.get(f"{API_BASE}/repos/{INFRAHUB_REPO}/git/trees/{commit_sha}") resp.raise_for_status() @@ -128,6 +132,7 @@ def _get_commit_date(client: httpx.Client, repo: str, sha: str) -> str: Returns: Date string in YYYY-MM-DD format. 
+ """ resp = client.get(f"{API_BASE}/repos/{repo}/git/commits/{sha}") resp.raise_for_status() @@ -153,6 +158,7 @@ def _find_nearest_sdk_version( Returns: SDK version string, or None if no version could be resolved. + """ if submodule_sha in sdk_tag_map: return sdk_tag_map[submodule_sha] @@ -181,6 +187,7 @@ def _version_sort_key(version: str) -> tuple[int, ...]: Returns: Tuple of integers for comparison. + """ return tuple(int(x) for x in version.split(".")) @@ -199,6 +206,7 @@ def _derive_version_ranges( Returns: List of (infrahub_pattern, min_sdk, month_year) tuples, sorted by version descending. + """ groups: dict[str, list[tuple[str, str]]] = defaultdict(list) for infrahub_ver, sdk_ver, date in release_mappings: @@ -228,6 +236,7 @@ def _format_release_mappings(mappings: list[tuple[str, str, str]]) -> str: Returns: Python source for the RELEASE_MAPPINGS list assignment. + """ lines = [ "# Detailed mapping of every Infrahub release to its pinned SDK version.", @@ -248,6 +257,7 @@ def _format_version_ranges(ranges: list[tuple[str, str, str]]) -> str: Returns: Python source for the VERSION_RANGES list assignment. + """ lines = [ "# Mapping of Infrahub minor version series to minimum SDK versions.", @@ -266,6 +276,7 @@ def _update_file(release_mappings: list[tuple[str, str, str]]) -> None: Args: release_mappings: List of (infrahub_version, sdk_version, date) tuples, sorted by version descending. + """ content = COMPATIBILITY_FILE.read_text() diff --git a/infrahub_sdk/_importer.py b/infrahub_sdk/_importer.py index 27af0970..f51a9cd0 100644 --- a/infrahub_sdk/_importer.py +++ b/infrahub_sdk/_importer.py @@ -20,6 +20,7 @@ def import_module(module_path: Path, import_root: str | None = None, relative_pa module_path (Path): Absolute path of the module to import. import_root (Optional[str]): Absolute string path to the current repository. relative_path (Optional[str]): Relative string path between module_path and import_root. 
+ """ import_root = import_root or str(module_path.parent) diff --git a/infrahub_sdk/checks.py b/infrahub_sdk/checks.py index a68ce0d4..0c275db3 100644 --- a/infrahub_sdk/checks.py +++ b/infrahub_sdk/checks.py @@ -133,7 +133,6 @@ def log_entries(self) -> str: @property def branch_name(self) -> str: """Return the name of the current git branch.""" - if self.branch: return self.branch @@ -149,13 +148,12 @@ def validate(self, data: dict) -> None: async def collect_data(self) -> dict: """Query the result of the GraphQL Query defined in self.query and return the result""" - return await self.client.query_gql_query(name=self.query, branch_name=self.branch_name, variables=self.params) async def run(self, data: dict | None = None) -> bool: """Execute the check after collecting the data from the GraphQL query. - The result of the check is determined based on the presence or not of ERROR log messages.""" - + The result of the check is determined based on the presence or not of ERROR log messages. + """ if not data: data = await self.collect_data() unpacked = data.get("data") or data diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 8317c15d..6855f4de 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -584,8 +584,8 @@ async def _process_nodes_and_relationships( ProcessRelationsNodeSync: A TypedDict containing two lists: - 'nodes': A list of InfrahubNode objects representing the nodes processed. - 'related_nodes': A list of InfrahubNode objects representing the related nodes - """ + """ nodes: list[InfrahubNode] = [] related_nodes: list[InfrahubNode] = [] @@ -721,6 +721,7 @@ async def all( Returns: list[InfrahubNode]: List of Nodes + """ if query_name is None: query_name = f"All_{get_kind_as_string(kind=kind)}" @@ -833,6 +834,7 @@ async def filters( # noqa: C901 Returns: list[InfrahubNodeSync]: List of Nodes that match the given filters. 
+ """ branch = branch or self.default_branch schema = await self.schema.get(kind=kind, branch=branch) @@ -957,6 +959,7 @@ async def execute_graphql( Returns: dict: The GraphQL data payload (response["data"]). + """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name, at=at) @@ -1040,6 +1043,7 @@ async def _execute_graphql_with_file( Returns: dict: The GraphQL data payload (response["data"]). + """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name) @@ -1146,6 +1150,7 @@ async def _post( Raises: ServerNotReachableError if we are not able to connect to the server ServerNotResponsiveError if the server didn't respond before the timeout expired + """ await self.login() @@ -1168,6 +1173,7 @@ async def _get(self, url: str, headers: dict | None = None, timeout: int | None Raises: ServerNotReachableError if we are not able to connect to the server ServerNotResponsiveError if the server didnd't respond before the timeout expired + """ await self.login() @@ -1194,6 +1200,7 @@ async def _get_streaming( Raises: ServerNotReachableError if we are not able to connect to the server ServerNotResponsiveError if the server didn't respond before the timeout expired + """ await self.login() @@ -1543,8 +1550,10 @@ async def allocate_next_ip_address( branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. tracker (str, optional): The offset for pagination. + Returns: InfrahubNode: Node corresponding to the allocated resource. + """ if resource_pool.get_kind() != "CoreIPAddressPool": raise ValueError("resource_pool is not an IP address pool") @@ -1626,8 +1635,10 @@ async def allocate_next_ip_prefix( branch: Name of the branch to allocate from. Defaults to default_branch. timeout: Flag to indicate whether to populate the store with the retrieved nodes. 
tracker: The offset for pagination. + Returns: InfrahubNode: Node corresponding to the allocated resource. + """ if resource_pool.get_kind() != "CoreIPPrefixPool": raise ValueError("resource_pool is not an IP prefix pool") @@ -1738,13 +1749,11 @@ async def convert_object_type( branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None, ) -> InfrahubNode: - """ - Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names + """Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-conversion for more information. """ - mapping_dict = ( {} if fields_mapping is None @@ -1864,6 +1873,7 @@ def execute_graphql( Returns: dict: The GraphQL data payload (`response["data"]`). + """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name, at=at) @@ -1947,6 +1957,7 @@ def _execute_graphql_with_file( Returns: dict: The GraphQL data payload (response["data"]). + """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name) @@ -2156,6 +2167,7 @@ def all( Returns: list[InfrahubNodeSync]: List of Nodes + """ if query_name is None: query_name = f"All_{get_kind_as_string(kind=kind)}" @@ -2200,8 +2212,8 @@ def _process_nodes_and_relationships( ProcessRelationsNodeSync: A TypedDict containing two lists: - 'nodes': A list of InfrahubNodeSync objects representing the nodes processed. - 'related_nodes': A list of InfrahubNodeSync objects representing the related nodes - """ + """ nodes: list[InfrahubNodeSync] = [] related_nodes: list[InfrahubNodeSync] = [] @@ -2309,6 +2321,7 @@ def filters( # noqa: C901 Returns: list[InfrahubNodeSync]: List of Nodes that match the given filters. 
+ """ branch = branch or self.default_branch schema = self.schema.get(kind=kind, branch=branch) @@ -2851,8 +2864,10 @@ def allocate_next_ip_address( branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. tracker (str, optional): The offset for pagination. + Returns: InfrahubNodeSync: Node corresponding to the allocated resource. + """ if resource_pool.get_kind() != "CoreIPAddressPool": raise ValueError("resource_pool is not an IP address pool") @@ -2934,8 +2949,10 @@ def allocate_next_ip_prefix( branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. tracker (str, optional): The offset for pagination. + Returns: InfrahubNodeSync: Node corresponding to the allocated resource. + """ if resource_pool.get_kind() != "CoreIPPrefixPool": raise ValueError("resource_pool is not an IP prefix pool") @@ -2981,6 +2998,7 @@ def _get(self, url: str, headers: dict | None = None, timeout: int | None = None Raises: ServerNotReachableError if we are not able to connect to the server ServerNotResponsiveError if the server didnd't respond before the timeout expired + """ self.login() @@ -3007,6 +3025,7 @@ def _get_streaming( Raises: ServerNotReachableError if we are not able to connect to the server ServerNotResponsiveError if the server didn't respond before the timeout expired + """ self.login() @@ -3038,6 +3057,7 @@ def _post( Raises: ServerNotReachableError if we are not able to connect to the server ServerNotResponsiveError if the server didnd't respond before the timeout expired + """ self.login() @@ -3172,13 +3192,11 @@ def convert_object_type( branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None, ) -> InfrahubNodeSync: - """ - Convert a given node to another kind on a given branch. 
`fields_mapping` keys are target fields names + """Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-conversion for more information. """ - mapping_dict = ( {} if fields_mapping is None diff --git a/infrahub_sdk/config.py b/infrahub_sdk/config.py index bda031dd..7efa4dcf 100644 --- a/infrahub_sdk/config.py +++ b/infrahub_sdk/config.py @@ -90,8 +90,7 @@ def settings_customise_sources( dotenv_settings: PydanticBaseSettingsSource, file_secret_settings: PydanticBaseSettingsSource, ) -> tuple[PydanticBaseSettingsSource, ...]: - """ - Customize settings sources to track which fields were explicitly provided. + """Customize settings sources to track which fields were explicitly provided. This allows us to properly handle authentication method precedence. """ @@ -138,8 +137,7 @@ def set_transport(cls, values: dict[str, Any]) -> dict[str, Any]: @model_validator(mode="before") @classmethod def validate_mix_authentication_schemes(cls, values: dict[str, Any]) -> dict[str, Any]: - """ - Handle conflicts between token and password authentication methods. + """Handle conflicts between token and password authentication methods. When both methods are present (from explicit args or environment variables), we prioritize the explicitly provided method. If we can determine which fields diff --git a/infrahub_sdk/convert_object_type.py b/infrahub_sdk/convert_object_type.py index 3e30dd2f..3d54ae41 100644 --- a/infrahub_sdk/convert_object_type.py +++ b/infrahub_sdk/convert_object_type.py @@ -19,8 +19,7 @@ class ConversionFieldValue(BaseModel): # Only one of these fields can be not None - """ - Holds the new value of the destination field during an object conversion. 
+ """Holds the new value of the destination field during an object conversion. Use `attribute_value` to specify the new raw value of an attribute. Use `peer_id` to specify new peer of a cardinality one relationship. Use `peers_ids` to specify new peers of a cardinality many relationship. @@ -41,8 +40,7 @@ def check_only_one_field(self) -> ConversionFieldValue: class ConversionFieldInput(BaseModel): - """ - Indicates how to fill in the value of the destination field during an object conversion. + """Indicates how to fill in the value of the destination field during an object conversion. Use `source_field` to reuse the value of the corresponding field of the object being converted. Use `data` to specify the new value for the field. Use `use_default_value` to set the destination field to its schema default. diff --git a/infrahub_sdk/ctl/branch.py b/infrahub_sdk/ctl/branch.py index d309c1d5..923b8e55 100644 --- a/infrahub_sdk/ctl/branch.py +++ b/infrahub_sdk/ctl/branch.py @@ -29,6 +29,7 @@ def format_timestamp(timestamp: str) -> str: """Format ISO timestamp to 'YYYY-MM-DD HH:MM:SS'. + Args: timestamp (str): ISO fromatted timestamp @@ -37,6 +38,7 @@ def format_timestamp(timestamp: str) -> str: Raises: Any execptions returned from formatting the timestamp are propogated to the caller + """ dt = datetime.fromisoformat(timestamp.replace("Z", "+00:00")) return dt.strftime("%Y-%m-%d %H:%M:%S") @@ -54,6 +56,7 @@ async def check_git_files_changed(client: "InfrahubClient", branch: str) -> bool Raises: Any exceptions from the API call are propagated to the caller + """ url = f"{client.address}/api/diff/files?branch={branch}" resp = await client._get(url=url, timeout=client.default_timeout) @@ -135,8 +138,7 @@ def generate_proposed_change_tables(proposed_changes: list[CoreProposedChange]) @app.callback() def callback() -> None: - """ - Manage the branches in a remote Infrahub instance. + """Manage the branches in a remote Infrahub instance. List, create, merge, rebase .. 
""" @@ -146,7 +148,6 @@ def callback() -> None: @catch_exception(console=console) async def list_branch(_: str = CONFIG_PARAM) -> None: """List all existing branches.""" - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) client = initialize_client() @@ -206,7 +207,6 @@ async def create( _: str = CONFIG_PARAM, ) -> None: """Create a new branch.""" - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) client = initialize_client() @@ -218,7 +218,6 @@ async def create( @catch_exception(console=console) async def delete(branch_name: str, _: str = CONFIG_PARAM) -> None: """Delete a branch.""" - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) client = initialize_client() @@ -230,7 +229,6 @@ async def delete(branch_name: str, _: str = CONFIG_PARAM) -> None: @catch_exception(console=console) async def rebase(branch_name: str, _: str = CONFIG_PARAM) -> None: """Rebase a Branch with main.""" - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) client = initialize_client() @@ -242,7 +240,6 @@ async def rebase(branch_name: str, _: str = CONFIG_PARAM) -> None: @catch_exception(console=console) async def merge(branch_name: str, _: str = CONFIG_PARAM) -> None: """Merge a Branch with main.""" - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) client = initialize_client() @@ -254,7 +251,6 @@ async def merge(branch_name: str, _: str = CONFIG_PARAM) -> None: @catch_exception(console=console) async def validate(branch_name: str, _: str = CONFIG_PARAM) -> None: """Validate if a branch has some conflict and is passing all the tests (NOT IMPLEMENTED YET).""" - client = initialize_client() await client.branch.validate(branch_name=branch_name) console.print(f"Branch '{branch_name}' is valid.") @@ -268,7 +264,6 @@ async def report( _: str = CONFIG_PARAM, ) -> None: """Generate branch cleanup status report.""" - client = initialize_client() # Fetch branch metadata first (needed for diff creation) diff --git a/infrahub_sdk/ctl/check.py 
b/infrahub_sdk/ctl/check.py index c74e08a7..6b237fb1 100644 --- a/infrahub_sdk/ctl/check.py +++ b/infrahub_sdk/ctl/check.py @@ -35,9 +35,7 @@ class CheckModule: @app.callback() def callback() -> None: - """ - Execute user-defined checks. - """ + """Execute user-defined checks.""" @app.command() @@ -53,7 +51,6 @@ def run( branch: str | None = None, ) -> None: """Locate and execute all checks under the defined path.""" - log_level = "DEBUG" if debug else "INFO" format_str = "%(message)s" logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()]) diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index 7e16408b..cfdcac79 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -94,7 +94,6 @@ def check( ), ) -> None: """Execute user-defined checks.""" - variables_dict = parse_cli_vars(variables) run_check( path=path, @@ -150,7 +149,6 @@ async def run( ), ) -> None: """Execute a script.""" - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) logging.getLogger("httpx").setLevel(logging.ERROR) logging.getLogger("httpcore").setLevel(logging.ERROR) @@ -207,8 +205,7 @@ async def _run_transform( debug: bool, repository_config: InfrahubRepositoryConfig, ) -> Any: - """ - Query GraphQL for the required data then run a transform on that data. + """Query GraphQL for the required data then run a transform on that data. Args: query_name: Name of the query to load (e.g. tags_query) @@ -217,8 +214,8 @@ async def _run_transform( branch: Name of the *infrahub* branch that should be queried for data debug: Prints debug info to the command line repository_config: Repository config object. This is used to load the graphql query from the repository. 
- """ + """ try: response = execute_graphql_query( query=query_name, @@ -267,7 +264,6 @@ async def render( out: str = typer.Option(None, help="Path to a file to save the result."), ) -> None: """Render a local Jinja2 Transform for debugging purpose.""" - variables_dict = parse_cli_vars(variables) repository_config = get_repository_config(find_repository_config_file()) @@ -317,7 +313,6 @@ def transform( out: str = typer.Option(None, help="Path to a file to save the result."), ) -> None: """Render a local transform (TransformPython) for debugging purpose.""" - variables_dict = parse_cli_vars(variables) repository_config = get_repository_config(find_repository_config_file()) @@ -373,7 +368,6 @@ def protocols( out: str = typer.Option("schema_protocols.py", help="Path to a file to save the result."), ) -> None: """Export Python protocols corresponding to a schema.""" - schema: dict[str, MainSchemaTypesAll] = {} if schemas: @@ -404,7 +398,6 @@ def protocols( @catch_exception(console=console) def version() -> None: """Display the version of Python and the version of the Python SDK in use.""" - console.print(f"Python: {platform.python_version()}\nPython SDK: v{sdk_version}") diff --git a/infrahub_sdk/ctl/config.py b/infrahub_sdk/ctl/config.py index a5b522b2..a46a1a72 100644 --- a/infrahub_sdk/ctl/config.py +++ b/infrahub_sdk/ctl/config.py @@ -52,7 +52,6 @@ def load(self, config_file: str | Path = "infrahubctl.toml", config_data: dict | Configuration is loaded from a config file in toml format that contains the settings, or from a dictionary of those settings passed in as "config_data" """ - if self._settings: return @@ -81,8 +80,8 @@ def load_and_exit(self, config_file: str | Path = "infrahubctl.toml", config_dat Args: config_file_name (str, optional): [description]. Defaults to "pyprojectctl.toml". config_data (dict, optional): [description]. Defaults to None. 
- """ + """ try: self.load(config_file=config_file, config_data=config_data) except ValidationError as exc: diff --git a/infrahub_sdk/ctl/formatters/__init__.py b/infrahub_sdk/ctl/formatters/__init__.py index bb3fe010..75292532 100644 --- a/infrahub_sdk/ctl/formatters/__init__.py +++ b/infrahub_sdk/ctl/formatters/__init__.py @@ -45,6 +45,7 @@ def detect_output_format() -> OutputFormat: Returns: ``OutputFormat.TABLE`` when stdout is connected to a terminal, ``OutputFormat.JSON`` otherwise (e.g. when piped). + """ return OutputFormat.TABLE if sys.stdout.isatty() else OutputFormat.JSON @@ -60,6 +61,7 @@ def get_formatter(output_format: OutputFormat) -> BaseFormatter: Raises: ValueError: If *output_format* is not a recognised format. + """ formatters: dict[OutputFormat, type[BaseFormatter]] = { OutputFormat.TABLE: TableFormatter, diff --git a/infrahub_sdk/ctl/formatters/base.py b/infrahub_sdk/ctl/formatters/base.py index 8ab5edf4..f3e2e1c4 100644 --- a/infrahub_sdk/ctl/formatters/base.py +++ b/infrahub_sdk/ctl/formatters/base.py @@ -31,6 +31,7 @@ def format_list( Returns: Formatted string representation of all nodes. + """ ... @@ -43,6 +44,7 @@ def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: Returns: Formatted string with full node details. + """ ... @@ -61,6 +63,7 @@ def _extract_relationship_value( Returns: Display string for the relationship value. + """ rel = getattr(node, rel_name, None) if rel is None: @@ -91,6 +94,7 @@ def extract_node_data( Returns: Dict mapping field names to their string display values. + """ data: dict[str, Any] = {} @@ -115,6 +119,7 @@ def non_empty_columns(rows: list[dict[str, Any]], columns: list[str]) -> list[st Returns: Filtered list of column names with data. + """ return [col for col in columns if any(str(row.get(col, "")).strip() for row in rows)] @@ -135,6 +140,7 @@ def extract_node_detail( Returns: Dict with metadata fields (id, display_label, kind) followed by attribute and relationship values. 
+ """ detail: dict[str, Any] = { "id": node.id or "", diff --git a/infrahub_sdk/ctl/formatters/csv.py b/infrahub_sdk/ctl/formatters/csv.py index 1b7e2d82..88438ff6 100644 --- a/infrahub_sdk/ctl/formatters/csv.py +++ b/infrahub_sdk/ctl/formatters/csv.py @@ -48,6 +48,7 @@ def format_list( Returns: CSV string with header and data rows. + """ all_columns = schema.attribute_names + schema.relationship_names rows = [extract_node_data(node, schema) for node in nodes] @@ -74,6 +75,7 @@ def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: Returns: CSV string with field/value columns. + """ detail = extract_node_detail(node, schema) output = io.StringIO() diff --git a/infrahub_sdk/ctl/formatters/json.py b/infrahub_sdk/ctl/formatters/json.py index 27e029e8..60eff0fb 100644 --- a/infrahub_sdk/ctl/formatters/json.py +++ b/infrahub_sdk/ctl/formatters/json.py @@ -36,6 +36,7 @@ def format_list( Returns: JSON array string. + """ items = [extract_node_data(node, schema) for node in nodes] return json.dumps(items, indent=2, default=str) @@ -52,6 +53,7 @@ def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: Returns: JSON object string. + """ detail = extract_node_detail(node, schema) return json.dumps(detail, indent=2, default=str) diff --git a/infrahub_sdk/ctl/formatters/table.py b/infrahub_sdk/ctl/formatters/table.py index 2eca125c..bfe2c18b 100644 --- a/infrahub_sdk/ctl/formatters/table.py +++ b/infrahub_sdk/ctl/formatters/table.py @@ -40,6 +40,7 @@ def format_list( Returns: Rendered table string. + """ all_columns = schema.attribute_names + schema.relationship_names rows = [extract_node_data(node, schema) for node in nodes] @@ -67,6 +68,7 @@ def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: Returns: Rendered detail string. + """ detail = extract_node_detail(node, schema) @@ -114,6 +116,7 @@ def _render(renderable: Table) -> str: Returns: The rendered string output. 
+ """ buffer = StringIO() console = Console(file=buffer, force_terminal=False, width=120) diff --git a/infrahub_sdk/ctl/formatters/yaml.py b/infrahub_sdk/ctl/formatters/yaml.py index b84c8cb2..e75c0b6d 100644 --- a/infrahub_sdk/ctl/formatters/yaml.py +++ b/infrahub_sdk/ctl/formatters/yaml.py @@ -135,6 +135,7 @@ def _related_node_ref(rel: Any) -> str | list[str] | None: Returns: A string, list of strings, or None if the relationship is unset. + """ hfid = getattr(rel, "hfid", None) if hfid: diff --git a/infrahub_sdk/ctl/graphql.py b/infrahub_sdk/ctl/graphql.py index ea0158ce..095e353a 100644 --- a/infrahub_sdk/ctl/graphql.py +++ b/infrahub_sdk/ctl/graphql.py @@ -41,14 +41,14 @@ def find_gql_files(query_path: Path) -> list[Path]: - """ - Find all files with .gql extension in the specified directory. + """Find all files with .gql extension in the specified directory. Args: query_path: Path to the directory to search for .gql files Returns: List of Path objects for all .gql files found + """ if not query_path.exists(): raise FileNotFoundError(f"File or directory not found: {query_path}") @@ -61,7 +61,6 @@ def find_gql_files(query_path: Path) -> list[Path]: def get_graphql_query(queries_path: Path, schema: GraphQLSchema) -> tuple[DefinitionNode, ...]: """Get GraphQL queries definitions from a single GraphQL file.""" - if not queries_path.exists(): raise FileNotFoundError(f"File not found: {queries_path}") if not queries_path.is_file(): @@ -95,9 +94,7 @@ def generate_result_types(directory: Path, package: PackageGenerator, fragment: @app.callback() def callback() -> None: - """ - Various GraphQL related commands. 
- """ + """Various GraphQL related commands.""" @app.command() @@ -107,7 +104,6 @@ async def export_schema( _: str = CONFIG_PARAM, ) -> None: """Export the GraphQL schema to a file.""" - client = initialize_client() schema_text = await client.schema.get_graphql_schema() @@ -126,7 +122,6 @@ async def generate_return_types( _: str = CONFIG_PARAM, ) -> None: """Create Pydantic Models for GraphQL query return types""" - query = Path.cwd() if query is None else query # Load the GraphQL schema diff --git a/infrahub_sdk/ctl/menu.py b/infrahub_sdk/ctl/menu.py index ff702f8d..e774f18c 100644 --- a/infrahub_sdk/ctl/menu.py +++ b/infrahub_sdk/ctl/menu.py @@ -22,9 +22,7 @@ @app.callback() def callback() -> None: - """ - Manage the menu in a remote Infrahub instance. - """ + """Manage the menu in a remote Infrahub instance.""" @app.command() @@ -36,7 +34,6 @@ async def load( _: str = CONFIG_PARAM, ) -> None: """Load one or multiple menu files into Infrahub.""" - init_logging(debug=debug) logging.getLogger("infrahub_sdk").setLevel(logging.INFO) @@ -76,7 +73,6 @@ async def validate( _: str = CONFIG_PARAM, ) -> None: """Validate one or multiple menu files.""" - init_logging(debug=debug) logging.getLogger("infrahub_sdk").setLevel(logging.INFO) diff --git a/infrahub_sdk/ctl/object/__init__.py b/infrahub_sdk/ctl/object/__init__.py index 2f075779..0dfa43bf 100644 --- a/infrahub_sdk/ctl/object/__init__.py +++ b/infrahub_sdk/ctl/object/__init__.py @@ -32,9 +32,7 @@ @app.callback() def callback() -> None: - """ - Manage objects in a remote Infrahub instance. 
- """ + """Manage objects in a remote Infrahub instance.""" @app.command() @@ -46,7 +44,6 @@ async def load( _: str = CONFIG_PARAM, ) -> None: """Load one or multiple objects files into Infrahub.""" - init_logging(debug=debug) logging.getLogger("infrahub_sdk").setLevel(logging.INFO) @@ -86,7 +83,6 @@ async def validate( _: str = CONFIG_PARAM, ) -> None: """Validate one or multiple objects files.""" - init_logging(debug=debug) logging.getLogger("infrahub_sdk").setLevel(logging.INFO) diff --git a/infrahub_sdk/ctl/object/update.py b/infrahub_sdk/ctl/object/update.py index 7a2f110d..cb4c79d7 100644 --- a/infrahub_sdk/ctl/object/update.py +++ b/infrahub_sdk/ctl/object/update.py @@ -97,6 +97,7 @@ async def _update_with_set_args( identifier: Object UUID or display name. set_args: List of "key=value" strings. branch: Optional target branch. + """ data = parse_set_args(set_args) schema = await client.schema.get(kind=kind, branch=branch) @@ -153,6 +154,7 @@ async def _update_with_file( client: Initialised async Infrahub client. file: Path to the YAML or JSON object file. branch: Optional target branch. + """ files = ObjectFile.load_from_disk(paths=[file]) for obj_file in files: diff --git a/infrahub_sdk/ctl/object/utils.py b/infrahub_sdk/ctl/object/utils.py index 3fbcfd50..42bf71d7 100644 --- a/infrahub_sdk/ctl/object/utils.py +++ b/infrahub_sdk/ctl/object/utils.py @@ -43,6 +43,7 @@ async def resolve_node( Raises: NodeNotFoundError: If no lookup strategy finds the node. + """ if schema is None: schema = await client.schema.get(kind=kind, branch=branch) @@ -97,6 +98,7 @@ def prepare_relationship_data(data: dict[str, Any], schema: MainSchemaTypesAPI) Returns: A new dict with relationship values in SDK-compatible format. + """ rel_names = schema.relationship_names result: dict[str, Any] = {} @@ -116,6 +118,7 @@ def _to_relationship_value(value: Any) -> Any: Returns: A value suitable for passing to the SDK's node constructor. 
+ """ if isinstance(value, (dict, list)): return value diff --git a/infrahub_sdk/ctl/parsers.py b/infrahub_sdk/ctl/parsers.py index d8a97617..de86f0aa 100644 --- a/infrahub_sdk/ctl/parsers.py +++ b/infrahub_sdk/ctl/parsers.py @@ -18,6 +18,7 @@ def _coerce_value(value: str) -> Any: Returns: The coerced Python value. + """ # Try JSON array syntax (e.g. [["blue"], ["red"]] for cardinality-many) stripped = value.strip() @@ -67,6 +68,7 @@ def parse_set_args(set_args: list[str]) -> dict[str, Any]: Raises: typer.BadParameter: If any argument is not in key=value format. + """ result: dict[str, Any] = {} for arg in set_args: @@ -96,6 +98,7 @@ def parse_filter_args(filter_args: list[str]) -> dict[str, Any]: Raises: typer.BadParameter: If any argument is not in key=value format. + """ result: dict[str, Any] = {} for arg in filter_args: @@ -124,6 +127,7 @@ def validate_set_fields( Raises: typer.BadParameter: If any key is not a valid field name, with a message listing valid fields. + """ valid_fields = set(attribute_names) | set(relationship_names) invalid_keys = sorted(set(data.keys()) - valid_fields) diff --git a/infrahub_sdk/ctl/repository.py b/infrahub_sdk/ctl/repository.py index 1e4ca6f4..23262d9d 100644 --- a/infrahub_sdk/ctl/repository.py +++ b/infrahub_sdk/ctl/repository.py @@ -32,6 +32,7 @@ def find_repository_config_file(base_path: Path | None = None) -> Path: Raises: FileNotFoundError: If neither .infrahub.yml nor .infrahub.yaml exists. + """ if base_path is None: base_path = Path() @@ -94,8 +95,7 @@ def load_repository_config_file(repo_config_file: Path) -> dict: @app.callback() def callback() -> None: - """ - Manage the repositories in a remote Infrahub instance. + """Manage the repositories in a remote Infrahub instance. List, create, delete .. 
""" @@ -114,7 +114,6 @@ async def add( _: str = CONFIG_PARAM, ) -> None: """Add a new repository.""" - init_logging(debug=debug) input_data = { @@ -207,7 +206,6 @@ async def list_repositories( @app.command() async def init() -> None: """Initialize a new Infrahub repository.""" - console.print("The copier tool is not included in the Infrahub SDK CLI due to license restrictions,") console.print("please run the following command to create a new Infrahub repository project:\n") console.print("uv tool run --from 'copier' copier copy https://github.com/opsmill/infrahub-template ") diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 631ed874..67bd9923 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -30,9 +30,7 @@ @app.callback() def callback() -> None: - """ - Manage the schema in a remote Infrahub instance. - """ + """Manage the schema in a remote Infrahub instance.""" def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[SchemaFile]) -> None: @@ -131,7 +129,6 @@ async def load( _: str = CONFIG_PARAM, ) -> None: """Load one or multiple schema files into Infrahub.""" - init_logging(debug=debug) schemas_data = load_yamlfile_from_disk_and_exit(paths=schemas, file_type=SchemaFile, console=console) @@ -183,7 +180,6 @@ async def check( _: str = CONFIG_PARAM, ) -> None: """Check if schema files are valid and what would be the impact of loading them with Infrahub.""" - init_logging(debug=debug) schemas_data = load_yamlfile_from_disk_and_exit(paths=schemas, file_type=SchemaFile, console=console) diff --git a/infrahub_sdk/ctl/task.py b/infrahub_sdk/ctl/task.py index 1ffe58c1..1cacc573 100644 --- a/infrahub_sdk/ctl/task.py +++ b/infrahub_sdk/ctl/task.py @@ -82,7 +82,6 @@ async def list_tasks( _: str = CONFIG_PARAM, ) -> None: """List Infrahub tasks.""" - init_logging(debug=debug) client = initialize_client() diff --git a/infrahub_sdk/ctl/validate.py b/infrahub_sdk/ctl/validate.py index 9b6f9d6a..d8d58a74 100644 
--- a/infrahub_sdk/ctl/validate.py +++ b/infrahub_sdk/ctl/validate.py @@ -24,16 +24,13 @@ @app.callback() def callback() -> None: - """ - Helper to validate the format of various files. - """ + """Helper to validate the format of various files.""" @app.command(name="schema") @catch_exception(console=console) async def validate_schema(schema: Path, _: str = CONFIG_PARAM) -> None: """Validate the format of a schema file either in JSON or YAML""" - schema_data = load_yamlfile_from_disk_and_exit(paths=[schema], file_type=SchemaFile, console=console) if not schema_data: console.print(f"[red]Unable to find {schema}") @@ -66,7 +63,6 @@ def validate_graphql( out: str = typer.Option(None, help="Path to a file to save the result."), ) -> None: """Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint""" - try: query_str = find_graphql_query(query) except QueryNotFoundError: diff --git a/infrahub_sdk/file_handler.py b/infrahub_sdk/file_handler.py index 5d32441a..779a1bb9 100644 --- a/infrahub_sdk/file_handler.py +++ b/infrahub_sdk/file_handler.py @@ -43,6 +43,7 @@ async def prepare_upload(content: bytes | Path | BinaryIO | None, name: str | No Returns: A PreparedFile containing the file object, filename, and whether it should be closed. + """ if content is None: return PreparedFile(file_object=None, filename=None, should_close=False) @@ -77,6 +78,7 @@ def prepare_upload_sync(content: bytes | Path | BinaryIO | None, name: str | Non Returns: A PreparedFile containing the file object, filename, and whether it should be closed. + """ if content is None: return PreparedFile(file_object=None, filename=None, should_close=False) @@ -105,6 +107,7 @@ def handle_error_response(exc: httpx.HTTPStatusError) -> None: AuthenticationError: If authentication fails (401/403). NodeNotFoundError: If the file/node is not found (404). httpx.HTTPStatusError: For other HTTP errors. 
+ """ if exc.response.status_code in {401, 403}: response = exc.response.json() @@ -130,6 +133,7 @@ def handle_response(resp: httpx.Response) -> bytes: Raises: AuthenticationError: If authentication fails. NodeNotFoundError: If the file is not found. + """ try: resp.raise_for_status() @@ -150,6 +154,7 @@ def __init__(self, client: InfrahubClient) -> None: Args: client: The async Infrahub client instance. + """ self._client = client @@ -162,6 +167,7 @@ def _build_url(self, node_id: str, branch: str | None) -> str: Returns: The complete URL for downloading the file. + """ url = f"{self._client.address}/api/storage/files/{node_id}" if branch: @@ -193,6 +199,7 @@ async def download(self, node_id: str, branch: str | None, dest: Path | None = N ServerNotReachableError: If the server is not reachable. AuthenticationError: If authentication fails. NodeNotFoundError: If the node/file is not found. + """ effective_branch = branch or self._client.default_branch url = self._build_url(node_id=node_id, branch=effective_branch) @@ -222,6 +229,7 @@ async def _stream_to_file(self, url: str, dest: Path) -> int: ServerNotReachableError: If the server is not reachable. AuthenticationError: If authentication fails. NodeNotFoundError: If the file is not found. + """ try: async with self._client._get_streaming(url=url) as resp: @@ -255,6 +263,7 @@ def __init__(self, client: InfrahubClientSync) -> None: Args: client: The sync Infrahub client instance. + """ self._client = client @@ -267,6 +276,7 @@ def _build_url(self, node_id: str, branch: str | None) -> str: Returns: The complete URL for downloading the file. + """ url = f"{self._client.address}/api/storage/files/{node_id}" if branch: @@ -298,6 +308,7 @@ def download(self, node_id: str, branch: str | None, dest: Path | None = None) - ServerNotReachableError: If the server is not reachable. AuthenticationError: If authentication fails. NodeNotFoundError: If the node/file is not found. 
+ """ effective_branch = branch or self._client.default_branch url = self._build_url(node_id=node_id, branch=effective_branch) @@ -327,6 +338,7 @@ def _stream_to_file(self, url: str, dest: Path) -> int: ServerNotReachableError: If the server is not reachable. AuthenticationError: If authentication fails. NodeNotFoundError: If the file is not found. + """ try: with self._client._get_streaming(url=url) as resp: diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py index 0b1efc9c..379548aa 100644 --- a/infrahub_sdk/generator.py +++ b/infrahub_sdk/generator.py @@ -67,7 +67,6 @@ def client(self, value: InfrahubClient) -> None: async def collect_data(self) -> dict: """Query the result of the GraphQL Query defined in self.query and return the result""" - data = await self._init_client.query_gql_query( name=self.query, branch_name=self.branch_name, @@ -79,7 +78,6 @@ async def collect_data(self) -> dict: async def run(self, identifier: str, data: dict | None = None) -> None: """Execute the generator after collecting the data from the GraphQL query.""" - if not data: data = await self.collect_data() unpacked = data.get("data") or data diff --git a/infrahub_sdk/graphql/multipart.py b/infrahub_sdk/graphql/multipart.py index bdb1f84e..8abe33ee 100644 --- a/infrahub_sdk/graphql/multipart.py +++ b/infrahub_sdk/graphql/multipart.py @@ -37,6 +37,7 @@ def build_operations(query: str, variables: dict[str, Any]) -> str: Returns: JSON string containing the query and variables. + """ return ujson.dumps({"query": query, "variables": variables}) @@ -50,6 +51,7 @@ def build_file_map(file_key: str = "0", variable_path: str = "variables.file") - Returns: JSON string mapping the file key to the variable path. + """ return ujson.dumps({file_key: [variable_path]}) @@ -84,6 +86,7 @@ def build_payload( ... file_name="document.pdf", ... 
) >>> # payload can be passed to httpx.post(..., files=payload) + """ # Ensure file variable is null (spec requirement) variables = {**variables, "file": None} diff --git a/infrahub_sdk/graphql/query_renderer.py b/infrahub_sdk/graphql/query_renderer.py index 8ee64e83..ccde2597 100644 --- a/infrahub_sdk/graphql/query_renderer.py +++ b/infrahub_sdk/graphql/query_renderer.py @@ -114,6 +114,7 @@ def render_query_with_fragments(query_str: str, fragment_files: list[str]) -> st DuplicateFragmentError: Same fragment name declared in multiple files. FragmentNotFoundError: Query references a fragment not found in any declared file. CircularFragmentError: Circular dependency detected among fragments. + """ try: query_doc = parse(query_str) @@ -163,6 +164,7 @@ def render_query(name: str, config: InfrahubRepositoryConfig, relative_path: str DuplicateFragmentError: Same fragment name declared in multiple files. FragmentNotFoundError: Query references a fragment not found in any declared file. CircularFragmentError: Circular dependency detected among fragments. 
+ """ raw = config.get_query(name).load_query(relative_path=relative_path) try: diff --git a/infrahub_sdk/graphql/renderers.py b/infrahub_sdk/graphql/renderers.py index 5b6c2c0f..6897df87 100644 --- a/infrahub_sdk/graphql/renderers.py +++ b/infrahub_sdk/graphql/renderers.py @@ -38,6 +38,7 @@ def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str: '[1, 2, 3]' >>> convert_to_graphql_as_string(None) 'null' + """ if value is None: return "null" @@ -134,6 +135,7 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4, conver >>> data = {"user": {"@alias": "u", "@filters": {"id": 123}, "name": None}} >>> render_query_block(data) [' u: user(id: 123) {', ' name', ' }'] + """ filters_key = "@filters" alias_key = "@alias" @@ -199,6 +201,7 @@ def render_input_block(data: dict, offset: int = 4, indentation: int = 4, conver >>> data = {"user": {"name": "John", "hobbies": ["reading", "coding"]}} >>> render_input_block(data) [' user: {', ' name: "John"', ' hobbies: [', ' "reading",', ' "coding",', ' ]', ' }'] + """ offset_str = " " * offset lines = [] diff --git a/infrahub_sdk/graphql/utils.py b/infrahub_sdk/graphql/utils.py index 39e4aa4e..ed15c407 100644 --- a/infrahub_sdk/graphql/utils.py +++ b/infrahub_sdk/graphql/utils.py @@ -88,7 +88,8 @@ def strip_typename_from_fragment(fragment: FragmentDefinitionNode) -> FragmentDe def get_class_def_index(module: ast.Module) -> int: """Get the index of the first class definition in the module. - It's useful to insert other classes before the first class definition.""" + It's useful to insert other classes before the first class definition. + """ for idx, item in enumerate(module.body): if isinstance(item, ast.ClassDef): return idx diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py index 54dd99aa..7bcdbfd3 100644 --- a/infrahub_sdk/node/attribute.py +++ b/infrahub_sdk/node/attribute.py @@ -21,6 +21,7 @@ class _GraphQLPayloadAttribute(NamedTuple): (e.g. 
``{"value": ...}`` or ``{"from_pool": ...}``). variables: GraphQL variable bindings for unsafe string values. needs_metadata: When ``True``, the payload needs to append property flags/objects + """ payload: dict[str, Any] @@ -44,11 +45,11 @@ class Attribute: """Represents an attribute of a Node, including its schema, value, and properties.""" def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict) -> None: - """ - Args: - name (str): The name of the attribute. - schema (AttributeSchema): The schema defining the attribute. - data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. + """Args: + name (str): The name of the attribute. + schema (AttributeSchema): The schema defining the attribute. + data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. + """ self.name = name self._schema = schema @@ -109,7 +110,6 @@ def value(self, value: Any) -> None: def _initialize_graphql_payload(self) -> _GraphQLPayloadAttribute: """Resolve the attribute value into a GraphQL mutation payload object.""" - # Pool-based allocation (dict data or resource-pool node) if self._from_pool is not None: return _GraphQLPayloadAttribute(payload={"from_pool": self._from_pool}, variables={}, needs_metadata=True) @@ -186,5 +186,6 @@ def is_from_pool_attribute(self) -> bool: Returns: True if the attribute value is a resource pool node or was explicitly allocated from a pool. 
+ """ return (isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool()) or self._from_pool is not None diff --git a/infrahub_sdk/node/metadata.py b/infrahub_sdk/node/metadata.py index 1fe236d8..690fd291 100644 --- a/infrahub_sdk/node/metadata.py +++ b/infrahub_sdk/node/metadata.py @@ -7,9 +7,9 @@ class NodeMetadata: """Represents metadata about a node (created_at, created_by, updated_at, updated_by).""" def __init__(self, data: dict | None = None) -> None: - """ - Args: - data: Data containing the metadata fields from the GraphQL response. + """Args: + data: Data containing the metadata fields from the GraphQL response. + """ self.created_at: str | None = None self.created_by: NodeProperty | None = None @@ -45,9 +45,9 @@ class RelationshipMetadata: """Represents metadata about a relationship edge (updated_at, updated_by).""" def __init__(self, data: dict | None = None) -> None: - """ - Args: - data: Data containing the metadata fields from the GraphQL response. + """Args: + data: Data containing the metadata fields from the GraphQL response. + """ self.updated_at: str | None = None self.updated_by: NodeProperty | None = None diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index 24185886..4c94f6aa 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -41,11 +41,11 @@ class InfrahubNodeBase: """Base class for InfrahubNode and InfrahubNodeSync""" def __init__(self, schema: MainSchemaTypesAPI, branch: str, data: dict | None = None) -> None: - """ - Args: - schema: The schema of the node. - branch: The branch where the node resides. - data: Optional data to initialize the node. + """Args: + schema: The schema of the node. + branch: The branch where the node resides. + data: Optional data to initialize the node. 
+ """ self._schema = schema self._data = data @@ -225,6 +225,7 @@ def upload_from_path(self, path: Path) -> None: Example: node.upload_from_path(path=Path("/path/to/large_file.pdf")) + """ if not self._file_object_support: raise FeatureNotSupportedError( @@ -253,6 +254,7 @@ def upload_from_bytes(self, content: bytes | BinaryIO, name: str) -> None: >>> # Using file-like object (for large files) >>> with open("/path/to/file.bin", "rb") as f: ... node.upload_from_bytes(content=f, name="file.bin") + """ if not self._file_object_support: raise FeatureNotSupportedError( @@ -287,8 +289,8 @@ def _generate_input_data( # noqa: C901 Returns: dict[str, Dict]: Representation of an input data in dict format - """ + """ data: dict[str, Any] = {} variables: dict[str, Any] = {} @@ -595,12 +597,12 @@ def __init__( branch: str | None = None, data: dict | None = None, ) -> None: - """ - Args: - client: The client used to interact with the backend. - schema: The schema of the node. - branch: The branch where the node resides. - data: Optional data to initialize the node. + """Args: + client: The client used to interact with the backend. + schema: The schema of the node. + branch: The branch where the node resides. + data: Optional data to initialize the node. 
+ """ self._client = client self._file_handler = FileHandler(client=client) @@ -767,6 +769,7 @@ async def download_file(self, dest: Path | None = None) -> bytes | int: >>> # Stream to file (memory-efficient for large files) >>> bytes_written = await contract.download_file(dest=Path("/tmp/contract.pdf")) + """ self._validate_file_object_support(message=FILE_DOWNLOAD_FEATURE_NOT_SUPPORTED_MESSAGE) @@ -959,8 +962,8 @@ async def generate_query_data_node( Returns: dict[str, Union[Any, Dict]]: GraphQL query in dictionary format - """ + """ data: dict[str, Any] = {} for attr_name in self._attributes: @@ -1199,6 +1202,7 @@ async def _process_relationships( related_nodes (list[InfrahubNode]): The list to which related nodes will be appended. timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds. recursive:(bool): Whether to recursively process relationships of related nodes. + """ for rel_name in self._relationships: rel = getattr(self, rel_name) @@ -1246,6 +1250,7 @@ async def get_pool_allocated_resources(self, resource: InfrahubNode) -> list[Inf Returns: list[InfrahubNode]: The allocated nodes. + """ if not self.is_resource_pool(): raise ValueError("Allocated resources can only be fetched from resource pool nodes.") @@ -1304,6 +1309,7 @@ async def get_pool_resources_utilization(self) -> list[dict[str, Any]]: Returns: list[dict[str, Any]]: A list containing the allocation numbers for each resource of the pool. + """ if not self.is_resource_pool(): raise ValueError("Pool utilization can only be fetched for resource pool nodes.") @@ -1358,6 +1364,7 @@ async def get_flat_value(self, key: str, separator: str = "__") -> Any: Examples: name__value module.object.value + """ if separator not in key: return getattr(self, key) @@ -1412,12 +1419,12 @@ def __init__( branch: str | None = None, data: dict | None = None, ) -> None: - """ - Args: - client (InfrahubClientSync): The client used to interact with the backend synchronously. 
- schema (MainSchemaTypes): The schema of the node. - branch (Optional[str]): The branch where the node resides. - data (Optional[dict]): Optional data to initialize the node. + """Args: + client (InfrahubClientSync): The client used to interact with the backend synchronously. + schema (MainSchemaTypes): The schema of the node. + branch (Optional[str]): The branch where the node resides. + data (Optional[dict]): Optional data to initialize the node. + """ self._client = client self._file_handler = FileHandlerSync(client=client) @@ -1582,6 +1589,7 @@ def download_file(self, dest: Path | None = None) -> bytes | int: >>> # Stream to file (memory-efficient for large files) >>> bytes_written = contract.download_file(dest=Path("/tmp/contract.pdf")) + """ self._validate_file_object_support(message=FILE_DOWNLOAD_FEATURE_NOT_SUPPORTED_MESSAGE) @@ -1769,8 +1777,8 @@ def generate_query_data_node( Returns: dict[str, Union[Any, Dict]]: GraphQL query in dictionary format - """ + """ data: dict[str, Any] = {} for attr_name in self._attributes: @@ -2011,6 +2019,7 @@ def _process_relationships( related_nodes (list[InfrahubNodeSync]): The list to which related nodes will be appended. timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds. recursive:(bool): Whether to recursively process relationships of related nodes. + """ for rel_name in self._relationships: rel = getattr(self, rel_name) @@ -2058,6 +2067,7 @@ def get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[Infra Returns: list[InfrahubNodeSync]: The allocated nodes. + """ if not self.is_resource_pool(): raise ValueError("Allocate resources can only be fetched from resource pool nodes.") @@ -2116,6 +2126,7 @@ def get_pool_resources_utilization(self) -> list[dict[str, Any]]: Returns: list[dict[str, Any]]: A list containing the allocation numbers for each resource of the pool. 
+ """ if not self.is_resource_pool(): raise ValueError("Pool utilization can only be fetched for resource pool nodes.") @@ -2170,6 +2181,7 @@ def get_flat_value(self, key: str, separator: str = "__") -> Any: Examples: name__value module.object.value + """ if separator not in key: return getattr(self, key) diff --git a/infrahub_sdk/node/property.py b/infrahub_sdk/node/property.py index 652aa816..f3a01550 100644 --- a/infrahub_sdk/node/property.py +++ b/infrahub_sdk/node/property.py @@ -5,9 +5,9 @@ class NodeProperty: """Represents a property of a node, typically used for metadata like display labels.""" def __init__(self, data: dict | str) -> None: - """ - Args: - data (Union[dict, str]): Data representing the node property. + """Args: + data (Union[dict, str]): Data representing the node property. + """ self.id = None self.display_label = None diff --git a/infrahub_sdk/node/related_node.py b/infrahub_sdk/node/related_node.py index 5b46a8f7..b586cd95 100644 --- a/infrahub_sdk/node/related_node.py +++ b/infrahub_sdk/node/related_node.py @@ -18,12 +18,12 @@ class RelatedNodeBase: """Base class for representing a related node in a relationship.""" def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, name: str | None = None) -> None: - """ - Args: - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. + """Args: + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. 
+ """ self.schema = schema self.name = name @@ -186,6 +186,7 @@ def _generate_query_data( Returns: Dict: A dictionary representing the basic structure of a GraphQL query, including the node's ID, display label, and typename. The method also includes additional properties and any peer_data provided. + """ data: dict[str, Any] = {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}} properties: dict[str, Any] = {} @@ -221,13 +222,13 @@ def __init__( data: Any | dict, name: str | None = None, ) -> None: - """ - Args: - client (InfrahubClient): The client used to interact with the backend asynchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. + """Args: + client (InfrahubClient): The client used to interact with the backend asynchronously. + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. + """ self._client = client super().__init__(branch=branch, schema=schema, data=data, name=name) @@ -268,13 +269,13 @@ def __init__( data: Any | dict, name: str | None = None, ) -> None: - """ - Args: - client (InfrahubClientSync): The client used to interact with the backend synchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. + """Args: + client (InfrahubClientSync): The client used to interact with the backend synchronously. + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. 
+ data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. + """ self._client = client super().__init__(branch=branch, schema=schema, data=data, name=name) diff --git a/infrahub_sdk/node/relationship.py b/infrahub_sdk/node/relationship.py index dcd33c9c..5915ae81 100644 --- a/infrahub_sdk/node/relationship.py +++ b/infrahub_sdk/node/relationship.py @@ -23,11 +23,11 @@ class RelationshipManagerBase: """Base class for RelationshipManager and RelationshipManagerSync""" def __init__(self, name: str, branch: str, schema: RelationshipSchemaAPI) -> None: - """ - Args: - name (str): The name of the relationship. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. + """Args: + name (str): The name of the relationship. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + """ self.initialized: bool = False self._has_update: bool = False @@ -88,6 +88,7 @@ def _generate_query_data( Dict: A dictionary representing the basic structure of a GraphQL query for multiple related nodes. It includes count, edges, and node information (ID, display label, and typename), along with additional properties and any peer_data provided. + """ data: dict[str, Any] = { "count": None, @@ -124,14 +125,14 @@ def __init__( schema: RelationshipSchemaAPI, data: Any | dict, ) -> None: - """ - Args: - name (str): The name of the relationship. - client (InfrahubClient): The client used to interact with the backend. - node (InfrahubNode): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. + """Args: + name (str): The name of the relationship. + client (InfrahubClient): The client used to interact with the backend. 
+ node (InfrahubNode): The node to which the relationship belongs. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Initial data for the relationships. + """ self.client = client self.node = node @@ -247,14 +248,14 @@ def __init__( schema: RelationshipSchemaAPI, data: Any | dict, ) -> None: - """ - Args: - name (str): The name of the relationship. - client (InfrahubClientSync): The client used to interact with the backend synchronously. - node (InfrahubNodeSync): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. + """Args: + name (str): The name of the relationship. + client (InfrahubClientSync): The client used to interact with the backend synchronously. + node (InfrahubNodeSync): The node to which the relationship belongs. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Initial data for the relationships. + """ self.client = client self.node = node diff --git a/infrahub_sdk/operation.py b/infrahub_sdk/operation.py index 7983fac5..54dca860 100644 --- a/infrahub_sdk/operation.py +++ b/infrahub_sdk/operation.py @@ -32,7 +32,6 @@ def __init__( @property def branch_name(self) -> str: """Return the name of the current git branch.""" - if self.branch: return self.branch diff --git a/infrahub_sdk/pytest_plugin/utils.py b/infrahub_sdk/pytest_plugin/utils.py index b82a2e34..f85132a5 100644 --- a/infrahub_sdk/pytest_plugin/utils.py +++ b/infrahub_sdk/pytest_plugin/utils.py @@ -19,6 +19,7 @@ def find_repository_config_file(base_path: Path | None = None) -> Path: Raises: FileNotFoundError: If neither .infrahub.yml nor .infrahub.yaml exists. 
+ """ if base_path is None: base_path = Path() diff --git a/infrahub_sdk/query_groups.py b/infrahub_sdk/query_groups.py index 78525087..0041ee6a 100644 --- a/infrahub_sdk/query_groups.py +++ b/infrahub_sdk/query_groups.py @@ -42,6 +42,7 @@ def set_properties( Args: identifier: The new value for the identifier. params: A dictionary with new values for the params. + """ self.identifier = identifier self.params = params or {} @@ -70,7 +71,8 @@ def _generate_group_name(self, suffix: str | None = None) -> str: def _generate_group_description(self, schema: MainSchemaTypesAPI) -> str: """Generate the description of the group from the params - and ensure it's not longer than the maximum length of the description field.""" + and ensure it's not longer than the maximum length of the description field. + """ if not self.params: return "" @@ -118,12 +120,12 @@ async def delete_unused(self) -> None: raise async def add_related_nodes(self, ids: list[str], update_group_context: bool | None = None) -> None: - """ - Add related Nodes IDs to the context. + """Add related Nodes IDs to the context. Args: ids (list[str]): List of node IDs to be added. update_group_context (Optional[bool], optional): Flag to control whether to update the group context. + """ if update_group_context is not False and ( self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context @@ -131,12 +133,12 @@ async def add_related_nodes(self, ids: list[str], update_group_context: bool | N self.related_node_ids.extend(ids) async def add_related_groups(self, ids: list[str], update_group_context: bool | None = None) -> None: - """ - Add related Groups IDs to the context. + """Add related Groups IDs to the context. Args: ids (list[str]): List of group IDs to be added. update_group_context (Optional[bool], optional): Flag to control whether to update the group context. 
+ """ if update_group_context is not False and ( self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context @@ -144,9 +146,7 @@ async def add_related_groups(self, ids: list[str], update_group_context: bool | self.related_group_ids.extend(ids) async def update_group(self) -> None: - """ - Create or update (using upsert) a CoreStandardGroup to store all the Nodes and Groups used during an execution. - """ + """Create or update (using upsert) a CoreStandardGroup to store all the Nodes and Groups used during an execution.""" members: list[str] = self.related_group_ids + self.related_node_ids if not members: @@ -212,12 +212,12 @@ def delete_unused(self) -> None: self.client.delete(kind=member.typename, id=member.id) def add_related_nodes(self, ids: list[str], update_group_context: bool | None = None) -> None: - """ - Add related Nodes IDs to the context. + """Add related Nodes IDs to the context. Args: ids (list[str]): List of node IDs to be added. update_group_context (Optional[bool], optional): Flag to control whether to update the group context. + """ if update_group_context is not False and ( self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context @@ -225,12 +225,12 @@ def add_related_nodes(self, ids: list[str], update_group_context: bool | None = self.related_node_ids.extend(ids) def add_related_groups(self, ids: list[str], update_group_context: bool | None = None) -> None: - """ - Add related Groups IDs to the context. + """Add related Groups IDs to the context. Args: ids (list[str]): List of group IDs to be added. update_group_context (Optional[bool], optional): Flag to control whether to update the group context. 
+ """ if update_group_context is not False and ( self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context @@ -238,9 +238,7 @@ def add_related_groups(self, ids: list[str], update_group_context: bool | None = self.related_group_ids.extend(ids) def update_group(self) -> None: - """ - Create or update (using upsert) a CoreStandardGroup to store all the Nodes and Groups used during an execution. - """ + """Create or update (using upsert) a CoreStandardGroup to store all the Nodes and Groups used during an execution.""" members: list[str] = self.related_node_ids + self.related_group_ids if not members: diff --git a/infrahub_sdk/schema/__init__.py b/infrahub_sdk/schema/__init__.py index f4a67b1d..43621853 100644 --- a/infrahub_sdk/schema/__init__.py +++ b/infrahub_sdk/schema/__init__.py @@ -136,6 +136,7 @@ def _build_export_schemas( Returns: A :class:`SchemaExport` containing user-defined schemas by namespace. + """ if namespaces: restricted = set(namespaces) & set(RESTRICTED_NAMESPACES) @@ -176,12 +177,12 @@ def validate_data_against_schema(self, schema: MainSchemaTypesAPI, data: dict) - ) def set_cache(self, schema: dict[str, Any] | SchemaRootAPI | BranchSchema, branch: str | None = None) -> None: - """ - Set the cache manually (primarily for unit testing) + """Set the cache manually (primarily for unit testing) Args: schema: The schema to set the cache as provided by the /api/schema endpoint either in dict or SchemaRootAPI format branch: The name of the branch to set the cache for. 
+ """ branch = branch or self.client.default_branch @@ -342,6 +343,7 @@ async def all( Returns: dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind + """ branch = branch or self.client.default_branch if refresh and branch in self.cache and schema_hash and self.cache[branch].hash == schema_hash: @@ -530,8 +532,8 @@ async def fetch( Returns: dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind - """ + """ if timeout: self._deprecated_schema_timeout() @@ -563,6 +565,7 @@ async def export( Returns: A :class:`SchemaExport` containing user-defined schemas by namespace. + """ branch = branch or self.client.default_branch schema_nodes = await self.fetch(branch=branch, namespaces=namespaces, populate_cache=False) @@ -576,6 +579,7 @@ async def get_graphql_schema(self, branch: str | None = None) -> str: Returns: The GraphQL schema as a string. + """ branch = branch or self.client.default_branch url = f"{self.client.address}/schema.graphql?branch={branch}" @@ -623,6 +627,7 @@ def all( Returns: dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind + """ branch = branch or self.client.default_branch if refresh and branch in self.cache and schema_hash and self.cache[branch].hash == schema_hash: @@ -640,8 +645,7 @@ def get( refresh: bool = False, timeout: int | None = None, ) -> MainSchemaTypesAPI: - """ - Retrieve a specific schema object from the server. + """Retrieve a specific schema object from the server. Args: kind: The kind of schema object to retrieve. @@ -651,6 +655,7 @@ def get( Returns: MainSchemaTypes: The schema object. + """ branch = branch or self.client.default_branch @@ -799,6 +804,7 @@ def fetch( Returns: dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind + """ if timeout: self._deprecated_schema_timeout() @@ -831,6 +837,7 @@ def export( Returns: A :class:`SchemaExport` containing user-defined schemas by namespace. 
+ """ branch = branch or self.client.default_branch schema_nodes = self.fetch(branch=branch, namespaces=namespaces, populate_cache=False) @@ -844,6 +851,7 @@ def get_graphql_schema(self, branch: str | None = None) -> str: Returns: The GraphQL schema as a string. + """ branch = branch or self.client.default_branch url = f"{self.client.address}/schema.graphql?branch={branch}" diff --git a/infrahub_sdk/schema/main.py b/infrahub_sdk/schema/main.py index 849ffca5..fd98e1e5 100644 --- a/infrahub_sdk/schema/main.py +++ b/infrahub_sdk/schema/main.py @@ -296,14 +296,16 @@ def supports_artifact_definition(self) -> bool: def supports_artifacts(self) -> bool: """Returns True if this schema supports artifact operations via CoreArtifactTarget inheritance. Only NodeSchemaAPI overrides this; all other schema types return False by design because - artifact capability is tied to node inheritance, not profiles, templates, or generics.""" + artifact capability is tied to node inheritance, not profiles, templates, or generics. + """ return False @property def supports_file_object(self) -> bool: """Returns True if this schema supports file object operations via CoreFileObject inheritance. Only NodeSchemaAPI overrides this; all other schema types return False by design because - file object capability is tied to node inheritance, not profiles, templates, or generics.""" + file object capability is tied to node inheritance, not profiles, templates, or generics. + """ return False @property @@ -314,7 +316,8 @@ def supports_hierarchy(self) -> bool: @property def hierarchical_relationship_schemas(self) -> list[RelationshipSchemaAPI]: """Returns pseudo-schemas for parent/children/ancestors/descendants if hierarchy is set. - Only NodeSchemaAPI overrides this; all other schema types return an empty list.""" + Only NodeSchemaAPI overrides this; all other schema types return an empty list. 
+ """ return [] @@ -437,16 +440,12 @@ class BranchSchema(BaseModel): @classmethod def from_api_response(cls, data: MutableMapping[str, Any]) -> Self: - """ - Convert an API response from /api/schema into a BranchSchema object. - """ + """Convert an API response from /api/schema into a BranchSchema object.""" return cls.from_schema_root_api(data=SchemaRootAPI(**data)) @classmethod def from_schema_root_api(cls, data: SchemaRootAPI) -> Self: - """ - Convert a SchemaRootAPI object to a BranchSchema object. - """ + """Convert a SchemaRootAPI object to a BranchSchema object.""" nodes: MutableMapping[str, GenericSchemaAPI | NodeSchemaAPI | ProfileSchemaAPI | TemplateSchemaAPI] = {} for node in data.nodes: nodes[node.kind] = node diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index b7240625..cf59d898 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -47,6 +47,7 @@ def normalize_hfid_reference(value: str | list[str]) -> str | list[str]: - If value is already a list: returns it unchanged as list[str] - If value is a valid UUID string: returns it unchanged as str (will be treated as an ID) - If value is a non-UUID string: wraps it in a list as list[str] (single-component HFID) + """ if isinstance(value, list): return value @@ -143,9 +144,7 @@ def find_matching_relationship( async def get_relationship_info( client: InfrahubClient, schema: MainSchemaTypesAPI, name: str, value: Any, branch: str | None = None ) -> RelationshipInfo: - """ - Get the relationship info for a given relationship name. 
- """ + """Get the relationship info for a given relationship name.""" rel_schema = schema.get_relationship(name=name) info = RelationshipInfo(name=name, peer_kind=rel_schema.peer, rel_schema=rel_schema) @@ -211,7 +210,6 @@ class InfrahubObjectFileData(BaseModel): async def _get_processed_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]: """Get data processed according to the strategy""" - return await DataProcessorFactory.process_data(kind=self.kind, parameters=self.parameters, data=data) async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> list[ObjectValidationError]: diff --git a/infrahub_sdk/spec/range_expansion.py b/infrahub_sdk/spec/range_expansion.py index 5a522f0d..46c893d2 100644 --- a/infrahub_sdk/spec/range_expansion.py +++ b/infrahub_sdk/spec/range_expansion.py @@ -108,6 +108,7 @@ def range_expansion(interface_pattern: str) -> list[str]: ['GigabitEtherneta/0/1', 'GigabitEthernetb/0/1', 'GigabitEthernetc/0/1'] >>> range_expansion("Eth[a,c,e]/0/1") ['Etha/0/1', 'Ethc/0/1', 'Ethe/0/1'] + """ pattern_escaped = _escape_brackets(interface_pattern) re_compiled = re.compile(MATCH_PATTERN) diff --git a/infrahub_sdk/task/manager.py b/infrahub_sdk/task/manager.py index d50ff05d..500299e2 100644 --- a/infrahub_sdk/task/manager.py +++ b/infrahub_sdk/task/manager.py @@ -97,8 +97,8 @@ async def count(self, filters: TaskFilter | None = None) -> int: Returns: The number of tasks. - """ + """ query = self._generate_count_query(filters=filters) response = await self.client.execute_graphql( query=query.render(convert_enum=False), tracker="query-tasks-count" @@ -126,8 +126,8 @@ async def all( Returns: A list of tasks. - """ + """ return await self.filter( limit=limit, offset=offset, @@ -160,6 +160,7 @@ async def filter( Returns: A list of tasks. + """ if filter is None: filter = TaskFilter() @@ -215,6 +216,7 @@ async def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 6 Returns: The task object. 
+ """ for _ in range(timeout // interval): task = await self.get(id=id) @@ -237,8 +239,8 @@ async def process_page( Returns: A tuple containing a list of tasks and the count of tasks. - """ + """ response = await client.execute_graphql( query=query.render(convert_enum=False), tracker=f"query-tasks-page{page_number}", @@ -332,8 +334,8 @@ def count(self, filters: TaskFilter | None = None) -> int: Returns: The number of tasks. - """ + """ query = self._generate_count_query(filters=filters) response = self.client.execute_graphql(query=query.render(convert_enum=False), tracker="query-tasks-count") return int(response["InfrahubTask"]["count"]) @@ -359,8 +361,8 @@ def all( Returns: A list of tasks. - """ + """ return self.filter( limit=limit, offset=offset, @@ -393,6 +395,7 @@ def filter( Returns: A list of tasks. + """ if filter is None: filter = TaskFilter() @@ -448,6 +451,7 @@ def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 60) -> Returns: The task object. + """ for _ in range(timeout // interval): task = self.get(id=id) @@ -470,8 +474,8 @@ def process_page( Returns: A tuple containing a list of tasks and the count of tasks. - """ + """ response = client.execute_graphql( query=query.render(convert_enum=False), tracker=f"query-tasks-page{page_number}", diff --git a/infrahub_sdk/testing/docker.py b/infrahub_sdk/testing/docker.py index dc76b3a3..a8e1906f 100644 --- a/infrahub_sdk/testing/docker.py +++ b/infrahub_sdk/testing/docker.py @@ -12,9 +12,7 @@ def skip_version(min_infrahub_version: str | None = None, max_infrahub_version: str | None = None) -> bool: - """ - Check if a test should be skipped depending on Infrahub version. 
- """ + """Check if a test should be skipped depending on Infrahub version.""" if INFRAHUB_VERSION is None: return True diff --git a/infrahub_sdk/transforms.py b/infrahub_sdk/transforms.py index ee17605f..bd663d71 100644 --- a/infrahub_sdk/transforms.py +++ b/infrahub_sdk/transforms.py @@ -55,7 +55,6 @@ def transform(self, data: dict) -> Any: async def collect_data(self) -> dict: """Query the result of the GraphQL Query defined in self.query and return the result""" - return await self.client.query_gql_query(name=self.query, branch_name=self.branch_name) async def run(self, data: dict | None = None) -> Any: @@ -67,8 +66,8 @@ async def run(self, data: dict | None = None) -> Any: data: The data on which to run the transform. Data will be queried from the API if not provided Returns: Transformed data - """ + """ if not data: data = await self.collect_data() diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py index de9bd625..ab2c7469 100644 --- a/infrahub_sdk/utils.py +++ b/infrahub_sdk/utils.py @@ -104,7 +104,6 @@ def generate_uuid() -> str: def duplicates(input_list: list) -> list: """Identify and return all the duplicates in a list.""" - dups = [] clean_input_list = [item for item in input_list or [] if item is not None] @@ -127,7 +126,6 @@ def compare_lists(list1: list[Any], list2: list[Any]) -> tuple[list[Any], list[A - the item present only in list1 - the item present only in list2 """ - in_both = intersection(list1=list1, list2=list2) in_list_1 = list(set(list1) - set(in_both)) in_list_2 = list(set(list2) - set(in_both)) @@ -163,7 +161,6 @@ def deep_merge_dict(dicta: dict, dictb: dict, path: list | None = None) -> dict: def str_to_bool(value: str | bool | int) -> bool: """Convert a String to a Boolean""" - if isinstance(value, bool): return value @@ -272,7 +269,6 @@ async def extract_fields(selection_set: SelectionSetNode | None) -> dict[str, di In the future we'll probably need to redesign how we read GraphQL queries to generate better Database query. 
""" - if not selection_set: return None @@ -309,7 +305,8 @@ async def extract_fields_first_node(info: GraphQLResolveInfo) -> dict[str, dict] def write_to_file(path: Path, value: Any) -> bool: """Write a given value into a file and return if the operation was successful. - If the file does not exist, the function will attempt to create it.""" + If the file does not exist, the function will attempt to create it. + """ if not path.exists(): path.touch() diff --git a/pyproject.toml b/pyproject.toml index b1708471..1943e09f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -298,11 +298,13 @@ task-tags = ["FIXME", "TODO", "XXX"] select = ["ALL"] ignore = [ - "D", # pydocstyle - "DOC", # pydoclint + #"D", # pydocstyle + #"DOC", # pydoclint "CPY", # flake8-copyright "T201", # use of `print` "COM812", # missing-trailing-comma + "D203", # incorrect-blank-line-before-class (incompatible with D211) + "D213", # multi-line-summary-second-line (incompatible with D212) ################################################################################################## # Rules below needs to be Investigated # @@ -335,6 +337,31 @@ ignore = [ "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes "TC003", # Move standard library import `collections.abc.Iterable` into a type-checking block "UP031", # Use format specifiers instead of percent format + + ################################################################################################## + # Documentation rules (D and DOC). Listed individually so they can be evaluated and removed # + # one-by-one as docstrings are added/fixed, instead of blocking the entire D and DOC rule sets. 
# + ################################################################################################## + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D103", # Missing docstring in public function + "D104", # Missing docstring in public package + "D105", # Missing docstring in magic method + "D107", # Missing docstring in `__init__` + "D205", # Missing blank line after summary + "D301", # Use `r"""` if any backslashes in a docstring + "D400", # First line should end with a period + "D401", # First line of docstring should be in imperative mood + "D404", # First word of the docstring should not be "This" + "D415", # First line should end with a period, question mark, or exclamation point + "D417", # Missing argument description in the docstring + "D420", # Section name should end with a newline + "DOC102", # Docstring contains extraneous parameter(s) + "DOC201", # `return` is not documented in docstring + "DOC402", # `yield` is not documented in docstring + "DOC501", # Raised exception missing from docstring + "DOC502", # Raised exception is not explicitly raised ] diff --git a/tasks.py b/tasks.py index dbd4ca32..ec91e909 100644 --- a/tasks.py +++ b/tasks.py @@ -272,7 +272,6 @@ def _generate_sdk_api_docs(context: Context) -> None: @task def format(context: Context) -> None: """Run RUFF to format all Python files.""" - exec_cmds = ["ruff format .", "ruff check . 
--fix"] with context.cd(MAIN_DIRECTORY_PATH): for cmd in exec_cmds: diff --git a/tests/integration/test_infrahubctl.py b/tests/integration/test_infrahubctl.py index e2fa8171..35971df5 100644 --- a/tests/integration/test_infrahubctl.py +++ b/tests/integration/test_infrahubctl.py @@ -82,7 +82,6 @@ def ctl_client_config(self, client: InfrahubClient) -> Generator[None, None, Non def test_infrahubctl_transform_cmd_animal_person(self, repository: str, base_dataset: None) -> None: """Test infrahubctl transform without converting nodes.""" - with change_directory(repository): ethans_output = runner.invoke(app, ["transform", "animal_person", "name=Ethan Carter"]) structured_ethan_output = json.loads(strip_color(ethans_output.stdout)) @@ -95,7 +94,6 @@ def test_infrahubctl_transform_cmd_animal_person(self, repository: str, base_dat def test_infrahubctl_transform_cmd_convert_animal_person(self, repository: str, base_dataset: None) -> None: """Test infrahubctl transform when converting nodes.""" - with change_directory(repository): ethans_output = runner.invoke(app, ["transform", "animal_person_converted", "name=Ethan Carter"]) structured_ethan_output = json.loads(strip_color(ethans_output.stdout)) @@ -122,7 +120,6 @@ async def test_infrahubctl_generator_cmd_animal_tags( self, repository: str, base_dataset: None, client: InfrahubClient ) -> None: """Test infrahubctl generator without converting nodes.""" - expected_generated_tags = ["raw-ethan-carter-bella", "raw-ethan-carter-daisy", "raw-ethan-carter-luna"] initial_tags = await client.all(kind="BuiltinTag") @@ -144,7 +141,6 @@ async def test_infrahubctl_generator_cmd_animal_tags_convert_query( self, repository: str, base_dataset: None, client: InfrahubClient ) -> None: """Test infrahubctl generator with conversion of nodes.""" - expected_generated_tags = [ "converted-ethan-carter-bella", "converted-ethan-carter-daisy", diff --git a/tests/unit/ctl/formatters/test_csv.py b/tests/unit/ctl/formatters/test_csv.py index 
e7969ab5..edb579cd 100644 --- a/tests/unit/ctl/formatters/test_csv.py +++ b/tests/unit/ctl/formatters/test_csv.py @@ -23,6 +23,7 @@ def _make_mock_schema( Returns: MagicMock configured to behave like a MainSchemaTypesAPI object. + """ schema = MagicMock() schema.kind = kind @@ -51,6 +52,7 @@ def _make_mock_node( Returns: MagicMock configured to behave like an InfrahubNode object. + """ node = MagicMock() node.id = node_id @@ -75,6 +77,7 @@ def _parse_csv(text: str) -> list[dict[str, str]]: Returns: List of dicts keyed by header row values. + """ return list(csv.DictReader(io.StringIO(text))) diff --git a/tests/unit/ctl/formatters/test_json.py b/tests/unit/ctl/formatters/test_json.py index b7cc1caa..cfcb8cdb 100644 --- a/tests/unit/ctl/formatters/test_json.py +++ b/tests/unit/ctl/formatters/test_json.py @@ -26,6 +26,7 @@ def _make_mock_schema( Returns: MagicMock configured to behave like a MainSchemaTypesAPI object. + """ schema = MagicMock() schema.kind = kind @@ -54,6 +55,7 @@ def _make_mock_node( Returns: MagicMock configured to behave like an InfrahubNode object. + """ node = MagicMock() node.id = node_id diff --git a/tests/unit/ctl/formatters/test_table.py b/tests/unit/ctl/formatters/test_table.py index 967e9b23..d156854c 100644 --- a/tests/unit/ctl/formatters/test_table.py +++ b/tests/unit/ctl/formatters/test_table.py @@ -21,6 +21,7 @@ def _make_mock_schema( Returns: MagicMock configured to behave like a MainSchemaTypesAPI object. + """ schema = MagicMock() schema.kind = kind @@ -49,6 +50,7 @@ def _make_mock_node( Returns: MagicMock configured to behave like an InfrahubNode object. + """ node = MagicMock() node.id = node_id diff --git a/tests/unit/ctl/formatters/test_yaml.py b/tests/unit/ctl/formatters/test_yaml.py index e0a66268..e54e8642 100644 --- a/tests/unit/ctl/formatters/test_yaml.py +++ b/tests/unit/ctl/formatters/test_yaml.py @@ -23,6 +23,7 @@ def _make_mock_schema( Returns: MagicMock configured to behave like a MainSchemaTypesAPI object. 
+ """ schema = MagicMock() schema.kind = kind @@ -52,6 +53,7 @@ def _make_mock_node( Returns: MagicMock configured to behave like an InfrahubNode object. + """ node = MagicMock() node.id = node_id diff --git a/tests/unit/ctl/object/test_utils.py b/tests/unit/ctl/object/test_utils.py index 868eb8c1..44f45007 100644 --- a/tests/unit/ctl/object/test_utils.py +++ b/tests/unit/ctl/object/test_utils.py @@ -64,7 +64,6 @@ async def test_resolve_by_default_filter(mock_client: MagicMock) -> None: async def test_resolve_by_hfid(mock_client: MagicMock) -> None: """When the schema defines ``human_friendly_id``, ``client.get(hfid=...)`` is used.""" - mock_schema = MagicMock(spec=NodeSchemaAPI) mock_schema.default_filter = None mock_schema.human_friendly_id = ["name__value"] @@ -87,7 +86,6 @@ async def test_resolve_by_hfid(mock_client: MagicMock) -> None: async def test_resolve_by_hfid_multi_component(mock_client: MagicMock) -> None: """Multi-component HFID strings (``a/b``) are split on ``/``.""" - mock_schema = MagicMock(spec=NodeSchemaAPI) mock_schema.default_filter = None mock_schema.human_friendly_id = ["site__name__value", "name__value"] @@ -110,7 +108,6 @@ async def test_resolve_by_hfid_multi_component(mock_client: MagicMock) -> None: async def test_resolve_fallback_raises(mock_client: MagicMock) -> None: """When no lookup strategy matches, NodeNotFoundError is raised directly.""" - mock_schema = MagicMock(spec=NodeSchemaAPI) mock_schema.default_filter = None mock_schema.human_friendly_id = None diff --git a/tests/unit/ctl/schema/test_schema.py b/tests/unit/ctl/schema/test_schema.py index d1d60d21..771625d3 100644 --- a/tests/unit/ctl/schema/test_schema.py +++ b/tests/unit/ctl/schema/test_schema.py @@ -23,6 +23,7 @@ def _make_node_schema(kind: str, namespace: str, name: str, description: str = " Returns: A MagicMock with spec=NodeSchemaAPI and the given property values. 
+ """ schema = MagicMock(spec=NodeSchemaAPI) schema.kind = kind @@ -50,6 +51,7 @@ def _make_attr( Returns: A plain MagicMock with the given property values. + """ attr = MagicMock() attr.name = name @@ -71,6 +73,7 @@ def _make_rel(name: str, peer: str, cardinality: str = "one", optional: bool = T Returns: A plain MagicMock with the given property values. + """ rel = MagicMock() rel.name = name @@ -207,6 +210,7 @@ def _make_full_schema( Returns: A MagicMock configured with all schema_show-required fields. + """ schema = MagicMock() schema.kind = kind diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py index a4181d78..31ab463d 100644 --- a/tests/unit/ctl/test_render_app.py +++ b/tests/unit/ctl/test_render_app.py @@ -91,7 +91,6 @@ def test_render_branch_selection( expected_branch: str, ) -> None: """Test that the render command uses the correct branch source.""" - if from_git: monkeypatch.setattr("dulwich.porcelain.active_branch", lambda _: b"git-branch") diff --git a/tests/unit/ctl/test_schema_app.py b/tests/unit/ctl/test_schema_app.py index 11fe51a2..b9fc52d5 100644 --- a/tests/unit/ctl/test_schema_app.py +++ b/tests/unit/ctl/test_schema_app.py @@ -132,7 +132,6 @@ def test_schema_load_notvalid_namespace(httpx_mock: HTTPXMock) -> None: def test_load_valid_generic_schema(httpx_mock: HTTPXMock) -> None: """A test which ensures that a generic schema is correctly loaded when loaded from infrahubctl command""" - # Arrange fixture_file = get_fixtures_dir() / "models" / "valid_generic_schema.json" diff --git a/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py b/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py index d08d7e1a..5b72418b 100644 --- a/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py +++ b/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py @@ -23,6 +23,7 @@ def _make_mock_context( are written to the output directory. 
calls: If provided, each executed command string is appended to this list so the caller can verify how many times ``run()`` was invoked. + """ ctx = create_autospec(Context, instance=True) diff --git a/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py index 9b1befcf..25d837e0 100644 --- a/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py +++ b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py @@ -19,7 +19,8 @@ def build(self) -> str: class TestCommandOutputDocContentGenMethod: def test_apply_runs_command_and_reads_output(self, tmp_path: Path) -> None: """The method executes the command via context.run, then reads - the content from the temp file whose path was appended via --output.""" + the content from the temp file whose path was appended via --output. + """ output_content = "# Generated docs" # Arrange diff --git a/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py index db332bac..55cd4e8b 100644 --- a/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py +++ b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py @@ -56,7 +56,8 @@ def test_apply_renders_with_multiple_variables(self, tmp_path: Path) -> None: def test_auto_escaping_is_disabled(self, tmp_path: Path) -> None: """HTML content in template variables must not be auto-escaped, - since the SDK Jinja2 environment does not enable autoescape.""" + since the SDK Jinja2 environment does not enable autoescape. 
+        """
         # Arrange
         template_file = tmp_path / "test.j2"
         template_file.write_text("{{ html_content }}", encoding="utf-8")
diff --git a/tests/unit/sdk/graphql/test_fragment_renderer.py b/tests/unit/sdk/graphql/test_fragment_renderer.py
index 7d622112..cef947df 100644
--- a/tests/unit/sdk/graphql/test_fragment_renderer.py
+++ b/tests/unit/sdk/graphql/test_fragment_renderer.py
@@ -186,7 +186,7 @@ def test_collect_required_fragments_direct_single() -> None:
 
 
 def test_collect_required_fragments_transitive() -> None:
-    """deviceFragment spreads interfaceFragment — both must be collected."""
+    """The deviceFragment spreads interfaceFragment — both must be collected."""
     index = build_fragment_index([FRAG_INTERFACE, FRAG_DEVICE])
     doc = parse(QUERY_USE_DEVICE)
     required = collect_required_fragments(doc, index)
@@ -260,7 +260,7 @@ def test_render_transitive_dependency_included() -> None:
 
 
 def test_render_surplus_fragment_excluded() -> None:
-    """portFragment is not referenced — it must not appear in output."""
+    """The portFragment is not referenced — it must not appear in output."""
     result = render_query_with_fragments(QUERY_USE_INTERFACE, [FRAG_INTERFACE, FRAG_PORT])
     assert "fragment interfaceFragment" in result
     assert "fragment portFragment" not in result
diff --git a/tests/unit/sdk/pool/test_attribute_from_pool.py b/tests/unit/sdk/pool/test_attribute_from_pool.py
index 75d63f6d..18ec619e 100644
--- a/tests/unit/sdk/pool/test_attribute_from_pool.py
+++ b/tests/unit/sdk/pool/test_attribute_from_pool.py
@@ -1,5 +1,4 @@
-"""
-When using from_pool on a number attribute (e.g. vlan_id), the SDK should generate:
+"""When using from_pool on a number attribute (e.g. vlan_id), the SDK should generate:
 vlan_id: { from_pool: { id: "...", identifier: "..." } }
 
 There are two ways to request a pool allocation:
diff --git a/tests/unit/sdk/spec/test_object.py b/tests/unit/sdk/spec/test_object.py
index 7eee7aea..a6f69b83 100644
--- a/tests/unit/sdk/spec/test_object.py
+++ b/tests/unit/sdk/spec/test_object.py
@@ -346,7 +346,6 @@ async def test_hfid_normalization_in_object_loading(
     client_with_schema_01: InfrahubClient, test_case: HfidLoadTestCase
 ) -> None:
     """Test that HFIDs are normalized correctly based on cardinality and format."""
-
     root_location = {"apiVersion": "infrahub.app/v1", "kind": "Object", "spec": {"kind": "BuiltinLocation", "data": []}}
     location = {
         "apiVersion": root_location["apiVersion"],
diff --git a/tests/unit/sdk/test_config.py b/tests/unit/sdk/test_config.py
index fb735f78..0ba5ce6d 100644
--- a/tests/unit/sdk/test_config.py
+++ b/tests/unit/sdk/test_config.py
@@ -77,8 +77,8 @@ def test_password_auth_overrides_env_token_when_password_env_var_and_username_ex
     monkeypatch: pytest.MonkeyPatch,
 ) -> None:
     """Test that explicit username/password overrides INFRAHUB_API_TOKEN from environment when only username is provided
-    through Config object and password is provided through environment variable"""
-
+    through Config object and password is provided through environment variable
+    """
     # Set environment variable for api_token and password
     monkeypatch.setenv("INFRAHUB_API_TOKEN", "token-from-env")
     monkeypatch.setenv("INFRAHUB_PASSWORD", "testpass")
diff --git a/tests/unit/sdk/test_topological_sort.py b/tests/unit/sdk/test_topological_sort.py
index 06f6063a..0028d24d 100644
--- a/tests/unit/sdk/test_topological_sort.py
+++ b/tests/unit/sdk/test_topological_sort.py
@@ -74,8 +74,7 @@ def test_topological_sort_disjoint_2() -> None:
 
 
 def test_topological_sort_binary_tree() -> None:
-    """
-    a
+    """Tree rooted at a:
     b c
     d e f g
     hi j k