Skip to content

Commit 3c29e2f

Browse files
cephadm: move get_container_info out of cephadm.py
Now that get_container_info is free of dependencies on functions and classes that exist only in cephadm.py, we can move it. I chose to move it to a new module, container_lookup.py, because I didn't see any really good home for it — other modules were either too low-level or didn't fit thematically. Signed-off-by: John Mulligan <[email protected]>
1 parent 361c69d commit 3c29e2f

File tree

3 files changed

+107
-87
lines changed

3 files changed

+107
-87
lines changed

src/cephadm/cephadm.py

Lines changed: 1 addition & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -85,12 +85,10 @@
8585
concurrent_tasks,
8686
)
8787
from cephadmlib.container_engines import (
88-
ContainerInfo,
8988
Podman,
9089
check_container_engine,
9190
find_container_engine,
9291
normalize_container_id,
93-
parsed_container_image_stats,
9492
parsed_container_mem_usage,
9593
pull_command,
9694
registry_login,
@@ -146,7 +144,6 @@
146144
InitContainer,
147145
SidecarContainer,
148146
extract_uid_gid,
149-
get_container_stats,
150147
get_mgr_images,
151148
is_container_running,
152149
)
@@ -204,6 +201,7 @@
204201
MemUsageStatusUpdater,
205202
VersionStatusUpdater,
206203
)
204+
from cephadmlib.container_lookup import get_container_info
207205

208206

209207
FuncT = TypeVar('FuncT', bound=Callable)
@@ -467,82 +465,6 @@ def update_default_image(ctx: CephadmContext) -> None:
467465
ctx.image = _get_default_image(ctx)
468466

469467

470-
def get_container_info(
471-
ctx: CephadmContext, daemon_filter: str, by_name: bool
472-
) -> Optional[ContainerInfo]:
473-
"""
474-
:param ctx: Cephadm context
475-
:param daemon_filter: daemon name or type
476-
:param by_name: must be set to True if daemon name is provided
477-
:return: Container information or None
478-
"""
479-
if by_name and '.' not in daemon_filter:
480-
logger.warning(
481-
f'Trying to get container info using invalid daemon name {daemon_filter}'
482-
)
483-
return None
484-
485-
# configure filters: fsid and (daemon name or daemon type)
486-
kwargs = {
487-
'fsid': ctx.fsid,
488-
('daemon_name' if by_name else 'daemon_type'): daemon_filter,
489-
}
490-
# use keep_container_info to cache the ContainerInfo generated
491-
# during the loop and hopefully avoid having to perform the same
492-
# lookup right away.
493-
_cinfo_key = '_container_info'
494-
_updater = CoreStatusUpdater(keep_container_info=_cinfo_key)
495-
matching_daemons = [
496-
_updater.expand(ctx, entry)
497-
for entry in daemons_matching(ctx, **kwargs)
498-
]
499-
500-
if not matching_daemons:
501-
# no matches at all
502-
logger.debug(
503-
'no daemons match: daemon_filter=%r, by_name=%r',
504-
daemon_filter,
505-
by_name,
506-
)
507-
return None
508-
if by_name and len(matching_daemons) > 1:
509-
# too many matches while searching by name
510-
logger.warning(
511-
f'Found multiple daemons sharing same name: {daemon_filter}'
512-
)
513-
# Prefer to take the first daemon we find that is actually running, or
514-
# just the first in the list if none are running
515-
# (key reminder: false (0) sorts before true (1))
516-
matching_daemons = sorted(
517-
matching_daemons, key=lambda d: d.get('state') != 'running'
518-
)
519-
520-
matched_deamon = matching_daemons[0]
521-
is_running = matched_deamon.get('state') == 'running'
522-
image_name = matched_deamon.get('container_image_name', '')
523-
if is_running:
524-
cinfo = matched_deamon.get(_cinfo_key)
525-
if cinfo:
526-
# found a cached ContainerInfo while getting daemon statuses
527-
return cinfo
528-
return get_container_stats(
529-
ctx,
530-
DaemonIdentity.from_name(
531-
matched_deamon['fsid'], matched_deamon['name']
532-
),
533-
)
534-
elif image_name:
535-
# this daemon's container is not running. the regular container inspect
536-
# command will not work. Fall back to inspecting the container image
537-
assert isinstance(image_name, str)
538-
return parsed_container_image_stats(ctx, image_name)
539-
# not running, but no image name to look up!
540-
logger.debug(
541-
'bad daemon state: no image, not running: %r', matched_deamon
542-
)
543-
return None
544-
545-
546468
def infer_local_ceph_image(ctx: CephadmContext, container_path: str) -> Optional[str]:
547469
"""
548470
Infer the local ceph image based on the following priority criteria:
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
# container_lookup.py - high-level functions for getting container info
2+
3+
from typing import Optional
4+
5+
import logging
6+
7+
from .container_engines import ContainerInfo, parsed_container_image_stats
8+
from .container_types import get_container_stats
9+
from .context import CephadmContext
10+
from .daemon_identity import DaemonIdentity
11+
from .listing import daemons_matching
12+
from .listing_updaters import CoreStatusUpdater
13+
14+
15+
logger = logging.getLogger()
16+
17+
18+
def get_container_info(
    ctx: CephadmContext, daemon_filter: str, by_name: bool
) -> Optional[ContainerInfo]:
    """Look up ContainerInfo for a daemon matching the given name or type.

    :param ctx: Cephadm context
    :param daemon_filter: daemon name or type
    :param by_name: must be set to True if daemon name is provided
    :return: Container information or None
    """
    # A daemon *name* always contains a '.' (e.g. "osd.3"); reject early
    # if the caller said by_name but passed something that can't be one.
    if by_name and '.' not in daemon_filter:
        logger.warning(
            f'Trying to get container info using invalid daemon name {daemon_filter}'
        )
        return None

    # configure filters: fsid and (daemon name or daemon type)
    kwargs = {
        'fsid': ctx.fsid,
        ('daemon_name' if by_name else 'daemon_type'): daemon_filter,
    }
    # use keep_container_info to cache the ContainerInfo generated
    # during the loop and hopefully avoid having to perform the same
    # lookup right away.
    _cinfo_key = '_container_info'
    _updater = CoreStatusUpdater(keep_container_info=_cinfo_key)
    matching_daemons = [
        _updater.expand(ctx, entry)
        for entry in daemons_matching(ctx, **kwargs)
    ]

    if not matching_daemons:
        # no matches at all
        logger.debug(
            'no daemons match: daemon_filter=%r, by_name=%r',
            daemon_filter,
            by_name,
        )
        return None
    if by_name and len(matching_daemons) > 1:
        # too many matches while searching by name
        logger.warning(
            f'Found multiple daemons sharing same name: {daemon_filter}'
        )
        # Prefer to take the first daemon we find that is actually running, or
        # just the first in the list if none are running
        # (key reminder: false (0) sorts before true (1))
        matching_daemons = sorted(
            matching_daemons, key=lambda d: d.get('state') != 'running'
        )

    # NOTE: renamed from the misspelled "matched_deamon" for clarity.
    matched_daemon = matching_daemons[0]
    is_running = matched_daemon.get('state') == 'running'
    image_name = matched_daemon.get('container_image_name', '')
    if is_running:
        cinfo = matched_daemon.get(_cinfo_key)
        if cinfo:
            # found a cached ContainerInfo while getting daemon statuses
            return cinfo
        return get_container_stats(
            ctx,
            DaemonIdentity.from_name(
                matched_daemon['fsid'], matched_daemon['name']
            ),
        )
    elif image_name:
        # this daemon's container is not running. the regular container inspect
        # command will not work. Fall back to inspecting the container image
        assert isinstance(image_name, str)
        return parsed_container_image_stats(ctx, image_name)
    # not running, but no image name to look up!
    logger.debug(
        'bad daemon state: no image, not running: %r', matched_daemon
    )
    return None

src/cephadm/tests/test_cephadm.py

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,13 @@ def get_ceph_conf(
3939
mon_host = {mon_host}
4040
'''
4141

42+
def _container_info(*args, **kwargs):
    """Build a ContainerInfo instance, importing lazily to avoid cycles."""
    from cephadmlib.container_engines import ContainerInfo

    return ContainerInfo(*args, **kwargs)
47+
48+
4249
@contextlib.contextmanager
4350
def bootstrap_test_ctx(*args, **kwargs):
4451
with with_cephadm_ctx(*args, **kwargs) as ctx:
@@ -607,7 +614,7 @@ def test_infer_local_ceph_image(self, _logger, _listdir):
607614
ctx.container_engine = mock_podman()
608615

609616
# make sure the right image is selected when container is found
610-
cinfo = _cephadm.ContainerInfo('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
617+
cinfo = _container_info('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
611618
'registry.hub.docker.com/rkachach/ceph:custom-v0.5',
612619
'514e6a882f6e74806a5856468489eeff8d7106095557578da96935e4d0ba4d9d',
613620
'2022-04-19 13:45:20.97146228 +0000 UTC',
@@ -652,7 +659,7 @@ def test_infer_local_ceph_image(self, _logger, _listdir):
652659
],
653660
("935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972", "registry.hub.docker.com/rkachach/ceph:custom-v0.5", "666bbfa87e8df05702d6172cae11dd7bc48efb1d94f1b9e492952f19647199a4", "2022-04-19 13:45:20.97146228 +0000 UTC", ""
654661
),
655-
_cephadm.ContainerInfo('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
662+
_container_info('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
656663
'registry.hub.docker.com/rkachach/ceph:custom-v0.5',
657664
'666bbfa87e8df05702d6172cae11dd7bc48efb1d94f1b9e492952f19647199a4',
658665
'2022-04-19 13:45:20.97146228 +0000 UTC',
@@ -668,7 +675,7 @@ def test_infer_local_ceph_image(self, _logger, _listdir):
668675
],
669676
("935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972", "registry.hub.docker.com/rkachach/ceph:custom-v0.5", "666bbfa87e8df05702d6172cae11dd7bc48efb1d94f1b9e492952f19647199a4", "2022-04-19 13:45:20.97146228 +0000 UTC", ""
670677
),
671-
_cephadm.ContainerInfo('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
678+
_container_info('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
672679
'registry.hub.docker.com/rkachach/ceph:custom-v0.5',
673680
'666bbfa87e8df05702d6172cae11dd7bc48efb1d94f1b9e492952f19647199a4',
674681
'2022-04-19 13:45:20.97146228 +0000 UTC',
@@ -684,7 +691,7 @@ def test_infer_local_ceph_image(self, _logger, _listdir):
684691
],
685692
("935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972", "registry.hub.docker.com/rkachach/ceph:custom-v0.5", "666bbfa87e8df05702d6172cae11dd7bc48efb1d94f1b9e492952f19647199a4", "2022-04-19 13:45:20.97146228 +0000 UTC", ""
686693
),
687-
_cephadm.ContainerInfo('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
694+
_container_info('935b549714b8f007c6a4e29c758689cf9e8e69f2e0f51180506492974b90a972',
688695
'registry.hub.docker.com/rkachach/ceph:custom-v0.5',
689696
'666bbfa87e8df05702d6172cae11dd7bc48efb1d94f1b9e492952f19647199a4',
690697
'2022-04-19 13:45:20.97146228 +0000 UTC',
@@ -813,7 +820,7 @@ def _dms(*args, **kwargs):
813820
ValueError
814821
)
815822
cinfo = (
816-
_cephadm.ContainerInfo(*container_stats)
823+
_container_info(*container_stats)
817824
if container_stats
818825
else None
819826
)
@@ -884,7 +891,7 @@ def _dms(*args, **kwargs):
884891
_daemons.side_effect = _dms
885892
_current_daemons[:] = [down_osd_json]
886893

887-
expected_container_info = _cephadm.ContainerInfo(
894+
expected_container_info = _container_info(
888895
container_id='',
889896
image_name='quay.io/adk3798/ceph@sha256:7da0af22ce45aac97dff00125af590506d8e36ab97d78e5175149643562bfb0b',
890897
image_id='a03c201ff4080204949932f367545cd381c4acee0d48dbc15f2eac1e35f22318',
@@ -904,10 +911,10 @@ def _dms(*args, **kwargs):
904911
# than it partially being taken from the list_daemons output
905912
up_osd_json = copy.deepcopy(down_osd_json)
906913
up_osd_json['state'] = 'running'
907-
_get_stats.return_value = _cephadm.ContainerInfo('container_id', 'image_name','image_id','the_past','')
914+
_get_stats.return_value = _container_info('container_id', 'image_name','image_id','the_past','')
908915
_current_daemons[:] = [down_osd_json, up_osd_json]
909916

910-
expected_container_info = _cephadm.ContainerInfo(
917+
expected_container_info = _container_info(
911918
container_id='container_id',
912919
image_name='image_name',
913920
image_id='image_id',

0 commit comments

Comments
 (0)