Commit 801e6bf

scoopex (Marc Schöchlin) and garloff committed

improvements (#13)

* improvements
  - be more tolerant references to cloud.yaml entries
  - set mtu automatically if set to 0
  - use directly the specified path if a file is specified

  Signed-off-by: Marc Schöchlin <[email protected]>

* satisfy mypy and linting

  Signed-off-by: Marc Schöchlin <[email protected]>

---------

Signed-off-by: Marc Schöchlin <[email protected]>
Co-authored-by: Marc Schöchlin <[email protected]>
Co-authored-by: Kurt Garloff <[email protected]>
Signed-off-by: Marc Schöchlin <[email protected]>
1 parent ac2c71f commit 801e6bf

File tree

9 files changed: +540 / -263 lines


Makefile

Lines changed: 3 additions & 0 deletions

@@ -29,3 +29,6 @@ test: deps
 	${activate} && ${python} -m pytest test
 .PHONY: test
 
+black: deps
+	${activate} && ${python} -m black src
+.PHONY: black

requirements.txt

Lines changed: 1 addition & 0 deletions

@@ -10,3 +10,4 @@ openstacksdk==3.3.0
 pytest==7.4.0
 mypy==1.13.0
 flake8==6.1.0
+black>=24.4.2

src/openstack_workload_generator/__main__.py

Lines changed: 123 additions & 57 deletions

@@ -31,62 +31,114 @@
 
 LOGGER = logging.getLogger()
 
-parser = argparse.ArgumentParser(
-    prog='Create workloads on openstack installations')
-
-parser.add_argument('--log_level', metavar='loglevel', type=str,
-                    default="INFO", help='The loglevel')
-
-parser.add_argument('--os_cloud', type=cloud_checker,
-                    default=os.environ.get("OS_CLOUD", "admin"),
-                    help='The openstack config to use, defaults to the value of the OS_CLOUD '
-                         'environment variable or "admin" if the variable is not set')
-
-parser.add_argument('--ansible_inventory', type=str, nargs="?",
-                    help="Dump the created servers as an ansible inventory to the specified directory, "
-                         "adds a ssh proxy jump for the hosts without a floating ip")
-
-parser.add_argument('--clouds_yaml', type=str, nargs="?",
-                    help="Generate a openstack clouds.yaml file")
-
-parser.add_argument('--wait_for_machines', action="store_true",
-                    help="Wait for every machine to be created "
-                         "(normally the provisioning only waits for machines which use floating ips)")
-
-parser.add_argument('--config', type=str,
-                    default="default.yaml",
-                    help='The config file for environment creation, define a path to the'
-                         ' yaml file or a subpath in the profiles folder')
+parser = argparse.ArgumentParser(prog="Create workloads on openstack installations")
+
+parser.add_argument(
+    "--log_level", metavar="loglevel", type=str, default="INFO", help="The loglevel"
+)
+
+parser.add_argument(
+    "--os_cloud",
+    type=cloud_checker,
+    default=os.environ.get("OS_CLOUD", "admin"),
+    help="The openstack config to use, defaults to the value of the OS_CLOUD "
+    'environment variable or "admin" if the variable is not set',
+)
+
+parser.add_argument(
+    "--ansible_inventory",
+    type=str,
+    nargs="?",
+    help="Dump the created servers as an ansible inventory to the specified directory, "
+    "adds a ssh proxy jump for the hosts without a floating ip",
+)
+
+parser.add_argument(
+    "--clouds_yaml", type=str, nargs="?", help="Use a specific clouds.yaml file"
+)
+
+parser.add_argument(
+    "--wait_for_machines",
+    action="store_true",
+    help="Wait for every machine to be created "
+    "(normally the provisioning only waits for machines which use floating ips)",
+)
+
+parser.add_argument(
+    "--generate_clouds_yaml",
+    type=str,
+    nargs="?",
+    help="Generate a openstack clouds.yaml file",
+)
+
+
+parser.add_argument(
+    "--config",
+    type=str,
+    default="default.yaml",
+    help="The config file for environment creation, define a path to the"
+    " yaml file or a subpath in the profiles folder",
+)
 
 exclusive_group_domain = parser.add_mutually_exclusive_group(required=True)
 
-exclusive_group_domain.add_argument('--create_domains', type=item_checker, nargs="+", default=None,
-                                    metavar="DOMAINNAME",
-                                    help='A list of domains to be created')
-
-exclusive_group_domain.add_argument('--delete_domains', type=item_checker, nargs="+", default=None,
-                                    metavar="DOMAINNAME",
-                                    help='A list of domains to be deleted, all child elements are recursively deleted')
+exclusive_group_domain.add_argument(
+    "--create_domains",
+    type=item_checker,
+    nargs="+",
+    default=None,
+    metavar="DOMAINNAME",
+    help="A list of domains to be created",
+)
+
+exclusive_group_domain.add_argument(
+    "--delete_domains",
+    type=item_checker,
+    nargs="+",
+    default=None,
+    metavar="DOMAINNAME",
+    help="A list of domains to be deleted, all child elements are recursively deleted",
+)
 
 exclusive_group_project = parser.add_mutually_exclusive_group(required=False)
 
-exclusive_group_project.add_argument('--create_projects', type=item_checker, nargs="+", default=None,
-                                     metavar="PROJECTNAME",
-                                     help='A list of projects to be created in the created domains')
-
-exclusive_group_project.add_argument('--delete_projects', type=item_checker, nargs="+", default=None,
-                                     metavar="PROJECTNAME",
-                                     help='A list of projects to be deleted in the created '
-                                          'domains, all child elements are recursively deleted')
+exclusive_group_project.add_argument(
+    "--create_projects",
+    type=item_checker,
+    nargs="+",
+    default=None,
+    metavar="PROJECTNAME",
+    help="A list of projects to be created in the created domains",
+)
+
+exclusive_group_project.add_argument(
+    "--delete_projects",
+    type=item_checker,
+    nargs="+",
+    default=None,
+    metavar="PROJECTNAME",
+    help="A list of projects to be deleted in the created "
+    "domains, all child elements are recursively deleted",
+)
 
 exclusive_group_machines = parser.add_mutually_exclusive_group(required=False)
-exclusive_group_machines.add_argument('--create_machines', type=item_checker, nargs="+", default=None,
-                                      metavar="SERVERNAME",
-                                      help='A list of vms to be created in the created domains')
-
-exclusive_group_machines.add_argument('--delete_machines', type=item_checker, nargs="+", default=None,
-                                      metavar="SERVERNAME",
-                                      help='A list of vms to be deleted in the created projects')
+exclusive_group_machines.add_argument(
+    "--create_machines",
+    type=item_checker,
+    nargs="+",
+    default=None,
+    metavar="SERVERNAME",
+    help="A list of vms to be created in the created domains",
+)
+
+exclusive_group_machines.add_argument(
+    "--delete_machines",
+    type=item_checker,
+    nargs="+",
+    default=None,
+    metavar="SERVERNAME",
+    help="A list of vms to be deleted in the created projects",
+)
 
 args = parser.parse_args()
 
@@ -97,7 +149,11 @@
 
 
 def establish_connection():
-    config = loader.OpenStackConfig()
+    if args.clouds_yaml is None:
+        config = loader.OpenStackConfig()
+    else:
+        LOGGER.info(f"Loading connection configuration from {args.clouds_yaml}")
+        config = loader.OpenStackConfig(config_files=[args.clouds_yaml])
     cloud_config = config.get_one(args.os_cloud)
     return Connection(config=cloud_config)
 
@@ -123,20 +179,30 @@ def establish_connection():
         for workload_domain in workload_domains.values():
             for workload_project in workload_domain.get_projects(args.create_projects):
                 if args.create_machines:
-                    workload_project.get_and_create_machines(args.create_machines, args.wait_for_machines)
+                    workload_project.get_and_create_machines(
+                        args.create_machines, args.wait_for_machines
+                    )
                     if args.ansible_inventory:
                         workload_project.dump_inventory_hosts(args.ansible_inventory)
                     if args.clouds_yaml:
-                        clouds_yaml_data[f"{workload_domain.domain_name}-{workload_project.project_name}"] \
-                            = workload_project.get_clouds_yaml_data()
+                        clouds_yaml_data[
+                            f"{workload_domain.domain_name}-{workload_project.project_name}"
+                        ] = workload_project.get_clouds_yaml_data()
                 elif args.delete_machines:
-                    for machine_obj in workload_project.get_machines(args.delete_machines):
+                    for machine_obj in workload_project.get_machines(
+                        args.delete_machines
+                    ):
                         machine_obj.delete_machine()
-        if args.clouds_yaml:
-            LOGGER.info(f"Creating a a clouds yaml : {args.clouds_yaml}")
+        if args.generate_clouds_yaml:
+            LOGGER.info(f"Creating a a clouds yaml : {args.generate_clouds_yaml}")
             clouds_yaml_data = {"clouds": clouds_yaml_data}
-            with open(args.clouds_yaml, 'w') as file:
-                yaml.dump(clouds_yaml_data, file, default_flow_style=False, explicit_start=True)
+            with open(args.generate_clouds_yaml, "w") as file:
+                yaml.dump(
+                    clouds_yaml_data,
+                    file,
+                    default_flow_style=False,
+                    explicit_start=True,
+                )
         sys.exit(0)
     elif args.delete_projects:
         conn = establish_connection()
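The new branch in establish_connection() means that an explicitly supplied clouds.yaml path is honoured instead of only the default openstacksdk search locations. Below is a minimal standalone sketch of the same loading pattern, assuming the openstacksdk loader API that the diff itself uses; the file path "my-clouds.yaml" and the cloud entry name "admin" are placeholders, not values from the commit:

# Sketch of the connection pattern shown in establish_connection() above.
# "my-clouds.yaml" and "admin" are illustrative placeholders.
from openstack.config import loader
from openstack.connection import Connection

config = loader.OpenStackConfig(config_files=["my-clouds.yaml"])  # explicit file, as with --clouds_yaml
cloud_config = config.get_one("admin")  # cloud entry to use, analogous to args.os_cloud
conn = Connection(config=cloud_config)  # ready for conn.identity / conn.compute calls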

src/openstack_workload_generator/entities/domain.py

Lines changed: 19 additions & 11 deletions

@@ -19,9 +19,12 @@ def __init__(self, conn: Connection, domain_name: str):
         self.obj: Domain = self.conn.identity.find_domain(domain_name)
         if self.obj:
             DomainCache.add(self.obj.id, self.obj.name)
-        self.workload_user = WorkloadGeneratorDomain._get_user(conn, domain_name, self.obj)
-        self.workload_projects: dict[str, WorkloadGeneratorProject] = WorkloadGeneratorDomain._get_projects(
-            conn, self.obj, self.workload_user)
+        self.workload_user = WorkloadGeneratorDomain._get_user(
+            conn, domain_name, self.obj
+        )
+        self.workload_projects: dict[str, WorkloadGeneratorProject] = (
+            WorkloadGeneratorDomain._get_projects(conn, self.obj, self.workload_user)
+        )
 
     @staticmethod
     def _get_user(conn: Connection, domain_name: str, obj: Domain):
@@ -30,28 +33,31 @@ def _get_user(conn: Connection, domain_name: str, obj: Domain):
         return WorkloadGeneratorUser(conn, f"{domain_name}-admin", obj)
 
     @staticmethod
-    def _get_projects(conn: Connection, domain: Domain | None, user: WorkloadGeneratorUser | None) \
-            -> dict[str, WorkloadGeneratorProject]:
+    def _get_projects(
+        conn: Connection, domain: Domain | None, user: WorkloadGeneratorUser | None
+    ) -> dict[str, WorkloadGeneratorProject]:
         if not domain or not user:
             return dict()
         result: dict[str, WorkloadGeneratorProject] = dict()
         for project in conn.identity.projects(domain_id=domain.id):
-            result[project.name] = WorkloadGeneratorProject(conn, project.name, domain, user)
+            result[project.name] = WorkloadGeneratorProject(
+                conn, project.name, domain, user
+            )
         return result
 
     def create_and_get_domain(self) -> Domain:
         if self.obj:
             return self.obj
 
         self.obj = self.conn.identity.create_domain(
-            name=self.domain_name,
-            description="Automated creation",
-            enabled=True
+            name=self.domain_name, description="Automated creation", enabled=True
        )
         DomainCache.add(self.obj.id, self.obj.name)
         LOGGER.info(f"Created {DomainCache.ident_by_id(self.obj.id)}")
 
-        self.workload_user = WorkloadGeneratorDomain._get_user(self.conn, self.domain_name, self.obj)
+        self.workload_user = WorkloadGeneratorDomain._get_user(
+            self.conn, self.domain_name, self.obj
+        )
         return self.obj
 
     def disable_domain(self):
@@ -92,7 +98,9 @@ def create_and_get_projects(self, create_projects: list[str]):
         for project_name in create_projects:
             if project_name in self.workload_projects:
                 continue
-            project = WorkloadGeneratorProject(self.conn, project_name, self.obj, self.workload_user)
+            project = WorkloadGeneratorProject(
+                self.conn, project_name, self.obj, self.workload_user
+            )
             project.create_and_get_project()
             project.get_or_create_ssh_key()
             self.workload_projects[project_name] = project
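For orientation, the reformatted methods above compose roughly as sketched below. This is a hypothetical usage sketch based only on the signatures visible in this diff; the import path, the cloud name, and the domain/project names are assumptions for illustration, not code from the commit:

# Hypothetical sketch; import path, cloud name and domain/project names are assumed.
import openstack

from openstack_workload_generator.entities.domain import WorkloadGeneratorDomain

conn = openstack.connect(cloud="admin")  # any openstacksdk Connection works here
domain = WorkloadGeneratorDomain(conn, "demo-domain")
domain.create_and_get_domain()  # creates the domain unless find_domain() already returned it
domain.create_and_get_projects(["demo-project"])  # also creates each project's ssh key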
