
Commit 650b7bc

test: Add tests for response parameters support for BLS in Python backend
1 parent d3ee577 commit 650b7bc

6 files changed, +448 −0 lines changed
qa/L0_backend_python/parameters/response_parameters_bls_test.py

Lines changed: 171 additions & 0 deletions
@@ -0,0 +1,171 @@
# Copyright 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import sys

sys.path.append("../../common")

import json
import unittest

import numpy as np
import shm_util
import tritonclient.grpc as grpcclient
from tritonclient.utils import InferenceServerException


class ResponseParametersTest(unittest.TestCase):
    _server_address_grpc = "localhost:8001"
    _model_name = "response_parameters_bls"
    _shape = [1, 1]

    def setUp(self):
        self._shm_leak_detector = shm_util.ShmLeakDetector()

    def _assert_response_parameters_match(self, infer_result, expected_params):
        res_params = {}
        for param_key, param_value in infer_result.get_response().parameters.items():
            if param_value.HasField("bool_param"):
                value = param_value.bool_param
            elif param_value.HasField("int64_param"):
                value = param_value.int64_param
            elif param_value.HasField("string_param"):
                value = param_value.string_param
            else:
                raise ValueError(f"Unsupported parameter choice: {param_value}")
            res_params[param_key] = value
        self.assertEqual(expected_params, res_params)

    def _assert_response_parameters_infer_success(self, params):
        params_str = json.dumps(params)

        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
        inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))

        with self._shm_leak_detector.Probe() as shm_probe:
            with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
                result = client.infer(self._model_name, inputs)

        # verify the response parameters
        self._assert_response_parameters_match(result, params)

        # model returns the input as output
        output = str(result.as_numpy("OUTPUT")[0][0], encoding="utf-8")
        self.assertEqual(params_str, output)

    def _assert_response_parameters_infer_fail(self, params, expected_err_msg):
        params_str = json.dumps(params)

        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
        inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))

        with self._shm_leak_detector.Probe() as shm_probe:
            with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
                with self.assertRaises(InferenceServerException) as e:
                    client.infer(self._model_name, inputs)

        self.assertIn("[StatusCode.INVALID_ARGUMENT] ", str(e.exception))
        self.assertIn(expected_err_msg, str(e.exception))

    def test_setting_empty_response_parameters(self):
        params = {}
        self._assert_response_parameters_infer_success(params)

    def test_setting_one_element_response_parameters(self):
        params = {"many_elements": False}
        self._assert_response_parameters_infer_success(params)

    def test_setting_three_element_response_parameters(self):
        params = {"bool": True, "str": "Hello World!", "int": 1024}
        self._assert_response_parameters_infer_success(params)

    def test_setting_multi_element_response_parameters(self):
        params = {"a": "1", "b": "2", "c": 3, "d": False, "e": 5, "f": ""}
        self._assert_response_parameters_infer_success(params)

    def test_setting_wrong_type_response_parameters(self):
        params = []
        expected_err_msg = ", got <class 'list'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_int_key_type_response_parameters(self):
        params = {"1": "int key"}
        expected_err_msg = (
            "Expect parameters keys to have type str, found type <class 'int'>"
        )
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_float_response_parameters(self):
        params = {"int": 2, "float": 0.5}
        expected_err_msg = "Expect parameters values to have type bool/int/str, found type <class 'float'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_null_response_parameters(self):
        params = {"bool": True, "null": None}
        expected_err_msg = "Expect parameters values to have type bool/int/str, found type <class 'NoneType'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_nested_response_parameters(self):
        params = {"str": "", "list": ["variable"]}
        expected_err_msg = "Expect parameters values to have type bool/int/str, found type <class 'list'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_response_parameters_decoupled(self):
        model_name = "response_parameters_bls_decoupled"
        params = [{"bool": False, "int": 2048}, {"str": "Hello World!"}]
        params_str = json.dumps(params)

        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
        inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))

        responses = []
        with self._shm_leak_detector.Probe() as shm_probe:
            with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
                client.start_stream(
                    callback=(lambda result, error: responses.append((result, error)))
                )
                client.async_stream_infer(model_name=model_name, inputs=inputs)
                client.stop_stream()

        self.assertEqual(len(params), len(responses))
        for i in range(len(params)):
            result, error = responses[i]
            self.assertIsNone(error)

            # Since this is a decoupled model, the 'triton_final_response' parameter
            # will be a part of the response parameters, so include it into the expected
            # parameters. The model sends the complete final flag separately from the
            # response, so the parameter is always False.
            expected_params = params[i].copy()
            expected_params["triton_final_response"] = False
            self._assert_response_parameters_match(result, expected_params)

            output = str(result.as_numpy("OUTPUT")[0][0], encoding="utf-8")
            self.assertEqual(json.dumps(params[i]), output)


if __name__ == "__main__":
    unittest.main()

qa/L0_backend_python/parameters/test.sh

Lines changed: 16 additions & 0 deletions
@@ -39,8 +39,15 @@ mkdir -p models/response_parameters/1 && \
 mkdir -p models/response_parameters_decoupled/1 && \
     cp ../../python_models/response_parameters_decoupled/model.py models/response_parameters_decoupled/1 && \
     cp ../../python_models/response_parameters_decoupled/config.pbtxt models/response_parameters_decoupled
+mkdir -p models/response_parameters_bls/1 && \
+    cp ../../python_models/response_parameters_bls/model.py models/response_parameters_bls/1 && \
+    cp ../../python_models/response_parameters_bls/config.pbtxt models/response_parameters_bls
+mkdir -p models/response_parameters_bls_decoupled/1 && \
+    cp ../../python_models/response_parameters_bls_decoupled/model.py models/response_parameters_bls_decoupled/1 && \
+    cp ../../python_models/response_parameters_bls_decoupled/config.pbtxt models/response_parameters_bls_decoupled

 TEST_LOG="response_parameters_test.log"
+TEST_BLS_LOG="response_parameters_bls_test.log"
 SERVER_LOG="response_parameters_test.server.log"
 SERVER_ARGS="--model-repository=${MODELDIR}/parameters/models --backend-directory=${BACKEND_DIR} --log-verbose=1"

@@ -60,6 +67,15 @@ if [ $? -ne 0 ]; then
 fi
 set -e

+set +e
+python3 -m pytest --junitxml=response_parameters_bls_test.report.xml response_parameters_bls_test.py > $TEST_BLS_LOG 2>&1
+if [ $? -ne 0 ]; then
+    echo -e "\n***\n*** Response parameters in BLS mode test FAILED\n***"
+    cat $TEST_BLS_LOG
+    RET=1
+fi
+set -e
+
 kill $SERVER_PID
 wait $SERVER_PID

qa/python_models/response_parameters_bls/config.pbtxt

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
# Copyright 2023-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

name: "response_parameters_bls"
backend: "python"
max_batch_size: 8

input [
  {
    name: "RESPONSE_PARAMETERS"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]

output [
  {
    name: "OUTPUT"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]

instance_group [
  {
    count: 1
    kind: KIND_CPU
  }
]
qa/python_models/response_parameters_bls/model.py

Lines changed: 72 additions & 0 deletions
@@ -0,0 +1,72 @@
# Copyright 2023-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import json

import numpy as np
import triton_python_backend_utils as pb_utils


class TritonPythonModel:
    def execute(self, requests):
        responses = []

        for request in requests:
            try:
                bls_input_tensor = pb_utils.get_input_tensor_by_name(
                    request, "RESPONSE_PARAMETERS"
                )
                # Forward the request to the "response_parameters" model via BLS.
                bls_request = pb_utils.InferenceRequest(
                    model_name="response_parameters",
                    inputs=[bls_input_tensor],
                    requested_output_names=["OUTPUT"],
                )
                bls_response = bls_request.exec()
                response_tensors = bls_response.output_tensors()
                # Response parameters of the BLS response are exposed as a JSON string.
                response_parameters_str = bls_response.parameters()
                if bls_response.has_error():
                    print(bls_response.error().message())
                    raise Exception(bls_response.error().message())
                res_params = json.loads(response_parameters_str)

                # Set the downstream model's parameters on this model's response.
                response = pb_utils.InferenceResponse(
                    output_tensors=response_tensors, parameters=res_params
                )

                # Verify the parameters set on the response round-trip unchanged.
                res_params_set = {}
                if response.parameters() != "":
                    res_params_set = json.loads(response.parameters())
                if res_params_set != res_params:
                    raise Exception("Response parameters set differ from provided")
            except Exception as e:
                error = pb_utils.TritonError(
                    message=str(e), code=pb_utils.TritonError.INVALID_ARG
                )
                response = pb_utils.InferenceResponse(error=error)

            responses.append(response)

        return responses
qa/python_models/response_parameters_bls_decoupled/config.pbtxt

Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
# Copyright 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

name: "response_parameters_bls_decoupled"
backend: "python"
max_batch_size: 8

input [
  {
    name: "RESPONSE_PARAMETERS"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]

output [
  {
    name: "OUTPUT"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]

instance_group [
  {
    count: 1
    kind: KIND_CPU
  }
]

model_transaction_policy {
  decoupled: True
}
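
The sixth file in this commit, presumably the decoupled BLS model itself (likely qa/python_models/response_parameters_bls_decoupled/model.py, which would account for the remaining added lines), is not shown in this excerpt. The following is a hypothetical sketch only, not the commit's actual contents: a decoupled BLS model matching what response_parameters_bls_test.py expects could forward the request to the existing response_parameters_decoupled model with a decoupled BLS call (exec(decoupled=True)) and re-send each streamed response, copying its parameters, through its own response sender.

# Hypothetical sketch of the decoupled BLS model (not part of the shown diff).
import json

import triton_python_backend_utils as pb_utils


class TritonPythonModel:
    def execute(self, requests):
        for request in requests:
            response_sender = request.get_response_sender()
            try:
                bls_input_tensor = pb_utils.get_input_tensor_by_name(
                    request, "RESPONSE_PARAMETERS"
                )
                bls_request = pb_utils.InferenceRequest(
                    model_name="response_parameters_decoupled",
                    inputs=[bls_input_tensor],
                    requested_output_names=["OUTPUT"],
                )
                # Decoupled BLS: exec() returns an iterator over the downstream
                # model's streamed responses.
                for bls_response in bls_request.exec(decoupled=True):
                    if bls_response.has_error():
                        raise Exception(bls_response.error().message())
                    # The terminating response carries only the final flag.
                    if len(bls_response.output_tensors()) == 0:
                        continue
                    res_params = json.loads(bls_response.parameters())
                    response = pb_utils.InferenceResponse(
                        output_tensors=bls_response.output_tensors(),
                        parameters=res_params,
                    )
                    response_sender.send(response)
            except Exception as e:
                error = pb_utils.TritonError(
                    message=str(e), code=pb_utils.TritonError.INVALID_ARG
                )
                response_sender.send(pb_utils.InferenceResponse(error=error))
            # Signal that no more responses will be sent for this request.
            response_sender.send(flags=pb_utils.TRITONSERVER_RESPONSE_COMPLETE_FINAL)

        return None

Sending the TRITONSERVER_RESPONSE_COMPLETE_FINAL flag separately from the data responses, as sketched above, is consistent with the test's expectation that the triton_final_response parameter is False on every received response.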
