@@ -7,6 +7,7 @@
 from cephadm import CephadmOrchestrator
 from cephadm.upgrade import CephadmUpgrade, UpgradeState
 from cephadm.ssh import HostConnectionError
+from cephadm.utils import ContainerInspectInfo
 from orchestrator import OrchestratorError, DaemonDescription
 from .fixtures import _run_cephadm, wait, with_host, with_service, \
     receive_agent_metadata, async_side_effect
@@ -80,6 +81,30 @@ def test_upgrade_resume_clear_health_warnings(_rm_health_warning, cephadm_module
     _rm_health_warning.assert_has_calls(calls_list, any_order=True)
 
 
+@mock.patch('cephadm.upgrade.CephadmUpgrade._get_current_version', lambda _: (17, 2, 6))
+@mock.patch("cephadm.serve.CephadmServe._get_container_image_info")
+def test_upgrade_check_with_ceph_version(_get_img_info, cephadm_module: CephadmOrchestrator):
+    # This test guards against a regression in which appending the
+    # requested version to the image base produced a malformed image
+    # name; the issue caused the image to come out as
+    # quay.io/ceph/ceph:v18:v18.2.0
+    # see https://tracker.ceph.com/issues/63150
+    _img = ''
+
+    def _fake_get_img_info(img_name):
+        nonlocal _img
+        _img = img_name
+        return ContainerInspectInfo(
+            'image_id',
+            '18.2.0',
+            'digest'
+        )
+
+    _get_img_info.side_effect = _fake_get_img_info
+    cephadm_module.upgrade_check('', '18.2.0')
+    assert _img == 'quay.io/ceph/ceph:v18.2.0'
+
+
 @mock.patch("cephadm.serve.CephadmServe._run_cephadm", _run_cephadm('{}'))
 @pytest.mark.parametrize("use_repo_digest",
                          [
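For context on what the new test asserts: when upgrade_check is given a bare ceph_version, cephadm has to replace the tag on the default image base rather than append to it, otherwise the doubled-tag name from the tracker issue appears. Below is a minimal sketch of that tag handling; the helper name is illustrative, not the actual cephadm implementation, and it ignores registry hosts that carry a ':port'.

    # Illustrative sketch only, not cephadm's real code: the existing tag
    # must be stripped from the image base before the version is added.
    def image_for_version(base_image: str, version: str) -> str:
        # Drop any existing tag, e.g. ':v18'. Assumes the registry host
        # contains no ':port', which would need more careful parsing.
        repo = base_image.rsplit(':', 1)[0] if ':' in base_image else base_image
        return f'{repo}:v{version}'

    # Naive appending yields the buggy 'quay.io/ceph/ceph:v18:v18.2.0';
    # replacing the tag yields the name the test expects:
    assert image_for_version('quay.io/ceph/ceph:v18', '18.2.0') == 'quay.io/ceph/ceph:v18.2.0'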
|