
Commit d70c81d

Merge pull request #51 from stackhpc/upstream/wallaby-2023-07-31
Synchronise wallaby with upstream
2 parents 25e2d64 + 8fb6811 commit d70c81d

4 files changed: +238 -11 lines changed

nova/api/openstack/compute/flavor_access.py

Lines changed: 8 additions & 1 deletion
@@ -93,7 +93,14 @@ def _remove_tenant_access(self, req, id, body):
 
         vals = body['removeTenantAccess']
         tenant = vals['tenant']
-        identity.verify_project_id(context, tenant)
+        # It doesn't really matter if project exists or not: we can delete
+        # it from flavor's access list in both cases.
+        try:
+            identity.verify_project_id(context, tenant)
+        except webob.exc.HTTPBadRequest as identity_exc:
+            msg = "Project ID %s is not a valid project." % tenant
+            if msg not in identity_exc.explanation:
+                raise
 
         # NOTE(gibi): We have to load a flavor from the db here as
         # flavor.remove_access() will try to emit a notification and that needs
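
For reference, a minimal client-side sketch of the flavor action this hunk changes (the endpoint URL, token, flavor ID and project ID below are placeholders, not taken from the commit): after the patch, removeTenantAccess with a project ID that Keystone no longer knows about drops the stale access entry instead of returning HTTP 400, while an unreachable Keystone v3 endpoint still results in HTTP 400.

# Hypothetical client-side call; NOVA_URL and TOKEN are placeholders.
import requests

NOVA_URL = "http://controller:8774/v2.1"   # placeholder Nova API endpoint
TOKEN = "<keystone-token>"                  # placeholder auth token

resp = requests.post(
    NOVA_URL + "/flavors/2/action",
    headers={"X-Auth-Token": TOKEN, "Content-Type": "application/json"},
    json={"removeTenantAccess": {"tenant": "proj2"}},
)
# With this patch a project unknown to Keystone no longer yields 400 here;
# a missing Keystone v3 endpoint still does.
resp.raise_for_status()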

nova/api/openstack/identity.py

Lines changed: 13 additions & 9 deletions
@@ -27,24 +27,27 @@ def verify_project_id(context, project_id):
     """verify that a project_id exists.
 
     This attempts to verify that a project id exists. If it does not,
-    an HTTPBadRequest is emitted.
+    an HTTPBadRequest is emitted. Also HTTPBadRequest is emitted
+    if Keystone identity service version 3.0 is not found.
 
     """
     adap = utils.get_ksa_adapter(
         'identity', ksa_auth=context.get_auth_plugin(),
         min_version=(3, 0), max_version=(3, 'latest'))
 
-    failure = webob.exc.HTTPBadRequest(
-        explanation=_("Project ID %s is not a valid project.") %
-        project_id)
     try:
         resp = adap.get('/projects/%s' % project_id)
     except kse.EndpointNotFound:
         LOG.error(
-            "Keystone identity service version 3.0 was not found. This might "
-            "be because your endpoint points to the v2.0 versioned endpoint "
-            "which is not supported. Please fix this.")
-        raise failure
+            "Keystone identity service version 3.0 was not found. This "
+            "might be caused by Nova misconfiguration or Keystone "
+            "problems.")
+        msg = _("Nova was unable to find Keystone service endpoint.")
+        # TODO(astupnik). It may be reasonable to switch to HTTP 503
+        # (HTTP Service Unavailable) instead of HTTP Bad Request here.
+        # If the proper Keystone service is inaccessible, then technically
+        # this is a server side error and not an error in Nova.
+        raise webob.exc.HTTPBadRequest(explanation=msg)
     except kse.ClientException:
         # something is wrong, like there isn't a keystone v3 endpoint,
         # or nova isn't configured for the interface to talk to it;
@@ -57,7 +60,8 @@ def verify_project_id(context, project_id):
         return True
     elif resp.status_code == 404:
         # we got access, and we know this project is not there
-        raise failure
+        msg = _("Project ID %s is not a valid project.") % project_id
+        raise webob.exc.HTTPBadRequest(explanation=msg)
     elif resp.status_code == 403:
         # we don't have enough permission to verify this, so default
         # to "it's ok".
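
Both failure modes now surface as webob.exc.HTTPBadRequest, so a caller that wants to tolerate only the missing-project case has to match on the explanation text; that is exactly the pattern used in the flavor_access.py hunk above. A minimal standalone sketch of that pattern (the helper function name is hypothetical):

import webob.exc

from nova.api.openstack import identity


def ignore_missing_project(context, project_id):
    """Hypothetical helper: tolerate unknown projects, re-raise the rest."""
    try:
        identity.verify_project_id(context, project_id)
    except webob.exc.HTTPBadRequest as identity_exc:
        msg = "Project ID %s is not a valid project." % project_id
        if msg not in identity_exc.explanation:
            # e.g. "Nova was unable to find Keystone service endpoint."
            raise
        # The project is gone from Keystone; callers that only need to drop
        # a stale reference to it can continue.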
New file (PCI resize functional test): Lines changed: 193 additions & 0 deletions
@@ -0,0 +1,193 @@
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import fixtures
+
+from oslo_serialization import jsonutils
+
+from nova.tests.functional.api import client
+from nova.tests.functional.libvirt import test_pci_sriov_servers
+from nova.tests.unit.virt.libvirt import fakelibvirt
+
+
+class TestPciResize(test_pci_sriov_servers._PCIServersTestBase):
+    # these tests use multiple different configs so the whitelist is set by
+    # each testcase individually
+    PCI_PASSTHROUGH_WHITELIST = []
+    PCI_ALIAS = [
+        jsonutils.dumps(x)
+        for x in [
+            {
+                "vendor_id": fakelibvirt.PCI_VEND_ID,
+                "product_id": fakelibvirt.PCI_PROD_ID,
+                "name": "a-pci-dev",
+            },
+            {
+                "vendor_id": fakelibvirt.PCI_VEND_ID,
+                "product_id": fakelibvirt.PF_PROD_ID,
+                "device_type": "type-PF",
+                "name": "a-pf",
+            },
+            {
+                "vendor_id": fakelibvirt.PCI_VEND_ID,
+                "product_id": fakelibvirt.VF_PROD_ID,
+                "device_type": "type-VF",
+                "name": "a-vf",
+            },
+        ]
+    ]
+
+    def setUp(self):
+        super().setUp()
+        self.useFixture(
+            fixtures.MockPatch(
+                'nova.virt.libvirt.driver.LibvirtDriver.'
+                'migrate_disk_and_power_off',
+                return_value='{}'
+            )
+        )
+        # These tests should not depend on the host's sysfs
+        self.useFixture(
+            fixtures.MockPatch('nova.pci.utils.is_physical_function'))
+        self.useFixture(
+            fixtures.MockPatch(
+                'nova.pci.utils.get_function_by_ifname',
+                return_value=(None, False)
+            )
+        )
+
+    def _test_resize_from_two_devs_to_one_dev(self, num_pci_on_dest):
+        # The fake libvirt will emulate on the host:
+        # * two type-PCI in slot 0, 1
+        compute1_pci_info = fakelibvirt.HostPCIDevicesInfo(num_pci=2)
+        # the config matches the PCI dev
+        compute1_device_spec = [
+            jsonutils.dumps(x)
+            for x in [
+                {
+                    "vendor_id": fakelibvirt.PCI_VEND_ID,
+                    "product_id": fakelibvirt.PCI_PROD_ID,
+                },
+            ]
+        ]
+        self.flags(group='pci', passthrough_whitelist=compute1_device_spec)
+        self.start_compute(hostname="compute1", pci_info=compute1_pci_info)
+        self.assertPCIDeviceCounts("compute1", total=2, free=2)
+
+        # create a server that requests two PCI devs
+        extra_spec = {"pci_passthrough:alias": "a-pci-dev:2"}
+        flavor_id = self._create_flavor(extra_spec=extra_spec)
+        server = self._create_server(flavor_id=flavor_id, networks=[])
+        self.assertPCIDeviceCounts("compute1", total=2, free=0)
+
+        # start another compute with a different amount of PCI dev available
+        compute2_pci_info = fakelibvirt.HostPCIDevicesInfo(
+            num_pci=num_pci_on_dest)
+        # the config matches the PCI dev
+        compute2_device_spec = [
+            jsonutils.dumps(x)
+            for x in [
+                {
+                    "vendor_id": fakelibvirt.PCI_VEND_ID,
+                    "product_id": fakelibvirt.PCI_PROD_ID,
+                },
+            ]
+        ]
+        self.flags(group='pci', passthrough_whitelist=compute2_device_spec)
+        self.start_compute(hostname="compute2", pci_info=compute2_pci_info)
+        self.assertPCIDeviceCounts(
+            "compute2", total=num_pci_on_dest, free=num_pci_on_dest)
+
+        # resize the server to request only one PCI dev instead of the current
+        # two. This should fit to compute2 having at least one dev
+        extra_spec = {"pci_passthrough:alias": "a-pci-dev:1"}
+        flavor_id = self._create_flavor(extra_spec=extra_spec)
+        self._resize_server(server, flavor_id=flavor_id)
+        self._confirm_resize(server)
+        self.assertPCIDeviceCounts("compute1", total=2, free=2)
+        self.assertPCIDeviceCounts(
+            "compute2", total=num_pci_on_dest, free=num_pci_on_dest - 1)
+
+    def test_resize_from_two_devs_to_one_dev_dest_has_two_devs(self):
+        # this works
+        self._test_resize_from_two_devs_to_one_dev(num_pci_on_dest=2)
+
+    def test_resize_from_two_devs_to_one_dev_dest_has_one_dev(self):
+        # This is bug 1983753 as nova uses the old InstancePciRequest during
+        # the scheduling and therefore tries to find a compute with two PCI
+        # devs even though the flavor only requests one.
+        ex = self.assertRaises(
+            client.OpenStackApiException,
+            self._test_resize_from_two_devs_to_one_dev,
+            num_pci_on_dest=1
+        )
+        self.assertIn('nova.exception.NoValidHost', str(ex))
+
+    def test_resize_from_vf_to_pf(self):
+        # The fake libvirt will emulate on the host:
+        # * one type-PF in slot 0 with one VF
+        compute1_pci_info = fakelibvirt.HostPCIDevicesInfo(
+            num_pfs=1, num_vfs=1)
+        # the config matches only the VF
+        compute1_device_spec = [
+            jsonutils.dumps(x)
+            for x in [
+                {
+                    "vendor_id": fakelibvirt.PCI_VEND_ID,
+                    "product_id": fakelibvirt.VF_PROD_ID,
+                },
+            ]
+        ]
+        self.flags(group='pci', passthrough_whitelist=compute1_device_spec)
+        self.start_compute(hostname="compute1", pci_info=compute1_pci_info)
+        self.assertPCIDeviceCounts("compute1", total=1, free=1)
+
+        # create a server that requests one VF
+        extra_spec = {"pci_passthrough:alias": "a-vf:1"}
+        flavor_id = self._create_flavor(extra_spec=extra_spec)
+        server = self._create_server(flavor_id=flavor_id, networks=[])
+        self.assertPCIDeviceCounts("compute1", total=1, free=0)
+
+        # start another compute with a single PF dev available
+        # The fake libvirt will emulate on the host:
+        # * one type-PF in slot 0 with 1 VF
+        compute2_pci_info = fakelibvirt.HostPCIDevicesInfo(
+            num_pfs=1, num_vfs=1)
+        # the config matches the PF dev but not the VF
+        compute2_device_spec = [
+            jsonutils.dumps(x)
+            for x in [
+                {
+                    "vendor_id": fakelibvirt.PCI_VEND_ID,
+                    "product_id": fakelibvirt.PF_PROD_ID,
+                },
+            ]
+        ]
+        self.flags(group='pci', passthrough_whitelist=compute2_device_spec)
+        self.start_compute(hostname="compute2", pci_info=compute2_pci_info)
+        self.assertPCIDeviceCounts("compute2", total=1, free=1)
+
+        # resize the server to request one PF dev instead of the current VF
+        # dev. This should fit to compute2 having exactly one PF dev.
+        extra_spec = {"pci_passthrough:alias": "a-pf:1"}
+        flavor_id = self._create_flavor(extra_spec=extra_spec)
+        # This is bug 1983753 as nova uses the old InstancePciRequest during
+        # the scheduling and therefore tries to find a compute with a VF dev
+        # even though the flavor only requests a PF dev.
+        ex = self.assertRaises(
+            client.OpenStackApiException,
+            self._resize_server,
+            server,
+            flavor_id=flavor_id,
+        )
+        self.assertIn('nova.exception.NoValidHost', str(ex))
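
Outside the test harness the same PCI request shape is expressed through the [pci] options in nova.conf plus a flavor extra spec. A minimal sketch of building those values (the vendor and product IDs are placeholders; the test above uses fakelibvirt constants instead):

from oslo_serialization import jsonutils

# A [pci]/alias entry is a JSON-encoded dict; "name" is what flavors refer to.
pci_alias = jsonutils.dumps({
    "vendor_id": "1b36",    # placeholder vendor ID
    "product_id": "0100",   # placeholder product ID
    "name": "a-pci-dev",
})

# A flavor requests devices as "<alias name>:<count>".  Resizing from the
# two-device flavor to the one-device flavor is the transition that bug
# 1983753 mishandles during scheduling.
extra_spec_two_devs = {"pci_passthrough:alias": "a-pci-dev:2"}
extra_spec_one_dev = {"pci_passthrough:alias": "a-pci-dev:1"}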

nova/tests/unit/api/openstack/compute/test_flavor_access.py

Lines changed: 24 additions & 1 deletion
@@ -351,14 +351,37 @@ def test_add_tenant_access_with_invalid_tenant(self, mock_verify):
         mock_verify.assert_called_once_with(
             req.environ['nova.context'], 'proj2')
 
+    @mock.patch('nova.objects.Flavor.remove_access')
     @mock.patch('nova.api.openstack.identity.verify_project_id',
                 side_effect=exc.HTTPBadRequest(
                     explanation="Project ID proj2 is not a valid project."))
-    def test_remove_tenant_access_with_invalid_tenant(self, mock_verify):
+    def test_remove_tenant_access_with_invalid_tenant(self,
+                                                      mock_verify,
+                                                      mock_remove_access):
         """Tests the case that the tenant does not exist in Keystone."""
         req = fakes.HTTPRequest.blank(self._prefix + '/flavors/2/action',
                                       use_admin_context=True)
         body = {'removeTenantAccess': {'tenant': 'proj2'}}
+
+        self.flavor_action_controller._remove_tenant_access(
+            req, '2', body=body)
+        mock_verify.assert_called_once_with(
+            req.environ['nova.context'], 'proj2')
+        mock_remove_access.assert_called_once_with('proj2')
+
+    @mock.patch('nova.api.openstack.identity.verify_project_id',
+                side_effect=exc.HTTPBadRequest(
+                    explanation="Nova was unable to find Keystone "
+                                "service endpoint."))
+    def test_remove_tenant_access_missing_keystone_endpoint(self,
+                                                            mock_verify):
+        """Tests the case that Keystone identity service endpoint
+        version 3.0 was not found.
+        """
+        req = fakes.HTTPRequest.blank(self._prefix + '/flavors/2/action',
+                                      use_admin_context=True)
+        body = {'removeTenantAccess': {'tenant': 'proj2'}}
+
         self.assertRaises(exc.HTTPBadRequest,
                           self.flavor_action_controller._remove_tenant_access,
                           req, '2', body=body)
