[confcom] Add more thorough tests for --with-containers #9428
@@ -0,0 +1,13 @@ (new file)

```python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

import os


def get_binaries_dir():
    binaries_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "bin")
    if not os.path.exists(binaries_dir):
        os.makedirs(binaries_dir)
    return binaries_dir
```
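The helper above resolves, and creates on first use, a bin directory next to the package. A minimal usage sketch (not part of the diff), assuming the module is importable as azext_confcom.lib.binaries, as the import in the next file suggests:

```python
import os

from azext_confcom.lib.binaries import get_binaries_dir

# Build the path where a downloaded tool (for example the OPA binary below)
# would be stored; the directory is created if it does not yet exist.
opa_path = os.path.join(get_binaries_dir(), "opa")
print(opa_path)
```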
@@ -0,0 +1,52 @@ (new file)

```python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

import hashlib
import json
import os
from pathlib import Path
import platform
import subprocess
from typing import Iterable

import requests

from azext_confcom.lib.binaries import get_binaries_dir

_opa_pathh = os.path.abspath(os.path.join(get_binaries_dir(), "opa"))
_expected_sha256 = "fe8e191d44fec33db2a3d0ca788b9f83f866d980c5371063620c3c6822792877"


def opa_get():

    opa_fetch_resp = requests.get(
        f"https://openpolicyagent.org/downloads/latest/opa_{platform.system().lower()}_amd64")
```
Suggested change:

```diff
-        f"https://openpolicyagent.org/downloads/latest/opa_{platform.system().lower()}_amd64")
+        f"https://openpolicyagent.org/downloads/latest/opa_{platform.system().lower()}_amd64",
+        verify=True,  # Explicitly verify SSL certificates
+        timeout=30  # Add timeout to prevent hanging
+    )
```
Copilot AI (Nov 14, 2025):
The assertion will cause a cryptic error message if the SHA256 hash doesn't match. Consider using a more informative error message:

```python
actual_hash = hashlib.sha256(opa_fetch_resp.content).hexdigest()
if actual_hash != _expected_sha256:
    raise ValueError(f"OPA binary hash mismatch. Expected {_expected_sha256}, got {actual_hash}")
```

Suggested change:

```diff
-    assert hashlib.sha256(opa_fetch_resp.content).hexdigest() == _expected_sha256
+    actual_hash = hashlib.sha256(opa_fetch_resp.content).hexdigest()
+    if actual_hash != _expected_sha256:
+        raise ValueError(f"OPA binary hash mismatch. Expected {_expected_sha256}, got {actual_hash}")
```
Copilot AI (Nov 14, 2025):
The OPA binary is downloaded during package installation without verifying its authenticity or checking whether the file already exists. This could lead to:
- Unnecessary network calls during every installation
- No version pinning: the download URL uses "latest", which could break installations when new OPA versions are released
- The hardcoded SHA256 hash will fail if OPA releases a new version
Consider:
- Checking if the binary already exists before downloading
- Pinning to a specific OPA version instead of "latest"
- Updating the expected SHA256 hash when upgrading OPA versions
Suggested change:

```diff
-_expected_sha256 = "fe8e191d44fec33db2a3d0ca788b9f83f866d980c5371063620c3c6822792877"
-def opa_get():
-    opa_fetch_resp = requests.get(
-        f"https://openpolicyagent.org/downloads/latest/opa_{platform.system().lower()}_amd64")
-    opa_fetch_resp.raise_for_status()
-    assert hashlib.sha256(opa_fetch_resp.content).hexdigest() == _expected_sha256
-    with open(_opa_pathh, "wb") as f:
-        f.write(opa_fetch_resp.content)
+_OPA_VERSION = "v0.63.0"
+_expected_sha256 = "fe8e191d44fec33db2a3d0ca788b9f83f866d980c5371063620c3c6822792877"
+def opa_get():
+    # Check if OPA binary exists and matches expected hash
+    if os.path.isfile(_opa_pathh):
+        with open(_opa_pathh, "rb") as f:
+            file_hash = hashlib.sha256(f.read()).hexdigest()
+        if file_hash == _expected_sha256:
+            return _opa_pathh
+    # Download OPA binary if not present or hash mismatch
+    url = f"https://openpolicyagent.org/downloads/{_OPA_VERSION}/opa_{platform.system().lower()}_amd64"
+    opa_fetch_resp = requests.get(url)
+    opa_fetch_resp.raise_for_status()
+    assert hashlib.sha256(opa_fetch_resp.content).hexdigest() == _expected_sha256, "Downloaded OPA binary hash mismatch"
+    with open(_opa_pathh, "wb") as f:
+        f.write(opa_fetch_resp.content)
```
@@ -0,0 +1,64 @@ (new file)

```python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from typing import Any
from pydantic.dataclasses import dataclass as _dataclass, Field
from pydantic import field_serializer


# The policy model is represented as pydantic dataclasses, which makes
# serialisation to/from JSON trivial.

# For some collections in the model, the order has no semantic meaning
# (e.g. environment rules). We mark such fields using a custom OrderlessField
# class which is an extension of the pydantic Field class. This custom class
# just sets a metadata flag we can read later.

# We then also extend the dataclass decorator to sort these fields with this
# flag before serialisation and comparison.


def dataclass(cls=None, **dataclass_kwargs):
    def wrap(inner_cls):

        # This method uses a pydantic field serializer to operate on fields
        # before serialisation. Here we look for "orderless" fields and sort them.
        @field_serializer("*")
        def _sort_orderless(self, value, info):
            field = type(self).__pydantic_fields__[info.field_name]
            if (field.json_schema_extra or {}).get("orderless"):
                return sorted(value, key=repr)
            return value
        setattr(inner_cls, "_sort_orderless", _sort_orderless)

        # This custom equality method sorts "orderless" fields before comparison.
        def __eq__(self, other):
            def compare_field(name, field_info):
                if (field_info.json_schema_extra or {}).get("orderless"):
                    return (
                        sorted(getattr(self, name), key=repr) ==
                        sorted(getattr(other, name), key=repr)
                    )
                return getattr(self, name) == getattr(other, name)

            return (
                type(self) is type(other) and
                all(
                    compare_field(name, field_info)
                    for name, field_info in self.__pydantic_fields__.items()
                )
            )
        setattr(inner_cls, "__eq__", __eq__)

        return _dataclass(inner_cls, eq=False, **dataclass_kwargs)

    # This adds support for using the decorator with or without parentheses.
    if cls is None:
        return wrap
    return wrap(cls)


def OrderlessField(**kwargs: Any):
    return Field(json_schema_extra={"orderless": True}, **kwargs)
```
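For context, a minimal usage sketch (not part of the diff) of the decorator and OrderlessField defined above. The EnvRules model and its field names are hypothetical, and it assumes pydantic v2 with the two helpers in scope from the module above:

```python
from pydantic import TypeAdapter

# `dataclass` and `OrderlessField` are the helpers defined in the file above.


@dataclass
class EnvRules:
    name: str
    # Rule order carries no semantic meaning, so mark the field as orderless;
    # it is sorted before serialisation and comparison.
    rules: list = OrderlessField(default_factory=list)


a = EnvRules(name="web", rules=["B=2", "A=1"])
b = EnvRules(name="web", rules=["A=1", "B=2"])

assert a == b  # the injected __eq__ ignores ordering of the orderless field
print(TypeAdapter(EnvRules).dump_json(a, indent=2).decode())  # "rules" is emitted sorted
```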
@@ -0,0 +1,100 @@ (new file)

```python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from dataclasses import asdict
import json
from pathlib import Path
from textwrap import dedent
from typing import Union

from azext_confcom.lib.opa import opa_eval
from azext_confcom.lib.policy import Container, FragmentReference, Fragment, Policy
import re


# This is a single entrypoint for serializing both Policy and Fragment objects
def policy_serialize(policy: Union[Policy, Fragment]):

    if isinstance(policy, Fragment):
        return fragment_serialize(policy)

    policy_dict = asdict(policy)
    fragments_json = json.dumps(policy_dict.pop("fragments"), indent=2)
    containers_json = json.dumps(policy_dict.pop("containers"), indent=2)

    return dedent(f"""
    package {policy_dict.pop('package')}

    api_version := "{policy_dict.pop('api_version')}"
    framework_version := "{policy_dict.pop('framework_version')}"

    fragments := {fragments_json}

    containers := {containers_json}

    {chr(10).join(f"{key} := {str(value).lower()}" for key, value in policy_dict.items() if key.startswith("allow"))}

    mount_device := data.framework.mount_device
    unmount_device := data.framework.unmount_device
    mount_overlay := data.framework.mount_overlay
    unmount_overlay := data.framework.unmount_overlay
    create_container := data.framework.create_container
    exec_in_container := data.framework.exec_in_container
    exec_external := data.framework.exec_external
    shutdown_container := data.framework.shutdown_container
    signal_container_process := data.framework.signal_container_process
    plan9_mount := data.framework.plan9_mount
    plan9_unmount := data.framework.plan9_unmount
    get_properties := data.framework.get_properties
    dump_stacks := data.framework.dump_stacks
    runtime_logging := data.framework.runtime_logging
    load_fragment := data.framework.load_fragment
    scratch_mount := data.framework.scratch_mount
    scratch_unmount := data.framework.scratch_unmount

    reason := {{"errors": data.framework.errors}}
    """)


def fragment_serialize(fragment: Fragment):

    fragment_dict = asdict(fragment)
    fragments_json = json.dumps(fragment_dict.pop("fragments"), indent=2)
    containers_json = json.dumps(fragment_dict.pop("containers"), indent=2)

    return dedent(f"""
    package {fragment_dict.pop('package')}

    svn := "{fragment_dict.pop('svn')}"
    framework_version := "{fragment_dict.pop('framework_version')}"

    fragments := {fragments_json}

    containers := {containers_json}
    """)


def policy_deserialize(file_path: str):

    with open(file_path, 'r') as f:
        content = f.read()

    package_match = re.search(r'package\s+(\S+)', content)
    package_name = package_match.group(1)

    PolicyType = Policy if package_name == "policy" else Fragment

    raw_json = opa_eval(Path(file_path), f"data.{package_name}")["result"][0]["expressions"][0]["value"]

    raw_fragments = raw_json.pop("fragments", [])
    raw_containers = raw_json.pop("containers", [])

    return PolicyType(
        package=package_name,
        fragments=[FragmentReference(**fragment) for fragment in raw_fragments],
        containers=[Container(**container) for container in raw_containers],
        **raw_json
    )
```
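A minimal round-trip sketch (not part of the diff) showing how these helpers compose. The input path is hypothetical, and it assumes the OPA binary is available to opa_eval and that serialisation is lossless for the given policy:

```python
# Parse an existing Rego policy (or fragment) into the dataclass model via OPA,
# re-emit it as Rego, and check that the round trip is stable.
policy = policy_deserialize("example_policy.rego")  # hypothetical input file

rego_text = policy_serialize(policy)
with open("roundtrip_policy.rego", "w") as f:
    f.write(rego_text)

assert policy == policy_deserialize("roundtrip_policy.rego")
```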
Variable name typo: `_opa_pathh` has an extra 'h'. Should be `_opa_path` for consistency with naming conventions.