Skip to content

Commit e166889

Browse files
committed
Add response parameters test
* Test that parameters set in the response object are returned to the client
* Complete the basic response parameters test case
* Add a decoupled-model response parameters test
* Add a shared-memory leak detector to the tests
1 parent 2408167 commit e166889

File tree

7 files changed

+476
-2
lines changed

7 files changed

+476
-2
lines changed
Lines changed: 164 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,164 @@
1+
# Copyright 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2+
#
3+
# Redistribution and use in source and binary forms, with or without
4+
# modification, are permitted provided that the following conditions
5+
# are met:
6+
# * Redistributions of source code must retain the above copyright
7+
# notice, this list of conditions and the following disclaimer.
8+
# * Redistributions in binary form must reproduce the above copyright
9+
# notice, this list of conditions and the following disclaimer in the
10+
# documentation and/or other materials provided with the distribution.
11+
# * Neither the name of NVIDIA CORPORATION nor the names of its
12+
# contributors may be used to endorse or promote products derived
13+
# from this software without specific prior written permission.
14+
#
15+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
16+
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
18+
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
19+
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
20+
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21+
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22+
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
23+
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26+
27+
import sys
28+
29+
sys.path.append("../../common")
30+
31+
import json
32+
import unittest
33+
34+
import numpy as np
35+
import shm_util
36+
import tritonclient.grpc as grpcclient
37+
from tritonclient.utils import InferenceServerException
38+
39+
40+
class ResponseParametersTest(unittest.TestCase):
    """Verify that parameters attached to an InferenceResponse by a Python
    backend model are propagated back to the client over gRPC, for both
    regular and decoupled models, and that invalid parameter payloads are
    rejected with INVALID_ARGUMENT.
    """

    _server_address_grpc = "localhost:8001"
    _model_name = "response_parameters"
    _shape = [1, 1]

    def setUp(self):
        # Detects shared-memory leaks around each inference; probed via
        # `with self._shm_leak_detector.Probe():` in the helpers below.
        self._shm_leak_detector = shm_util.ShmLeakDetector()

    def _assert_response_parameters_match(self, infer_result, expected_params):
        """Decode the gRPC response parameters into a plain dict and assert
        they equal ``expected_params``.

        Raises:
            ValueError: if a parameter uses a proto oneof choice other than
                bool_param / int64_param / string_param.
        """
        res_params = {}
        for param_key, param_value in infer_result.get_response().parameters.items():
            if param_value.HasField("bool_param"):
                value = param_value.bool_param
            elif param_value.HasField("int64_param"):
                value = param_value.int64_param
            elif param_value.HasField("string_param"):
                value = param_value.string_param
            else:
                raise ValueError(f"Unsupported parameter choice: {param_value}")
            res_params[param_key] = value
        self.assertEqual(expected_params, res_params)

    def _assert_response_parameters_infer_success(self, params):
        """Send ``params`` as a JSON string input and expect a successful
        inference whose response parameters and echoed output match it."""
        params_str = json.dumps(params)

        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
        inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))

        # `as shm_probe` binding removed — the probe object is never used.
        with self._shm_leak_detector.Probe():
            with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
                result = client.infer(self._model_name, inputs)

        # verify the response parameters
        self._assert_response_parameters_match(result, params)

        # model returns the input as output
        output = str(result.as_numpy("OUTPUT")[0][0], encoding="utf-8")
        self.assertEqual(params_str, output)

    def _assert_response_parameters_infer_fail(self, params, expected_err_msg):
        """Send ``params`` as a JSON string input and expect the inference to
        fail with INVALID_ARGUMENT containing ``expected_err_msg``."""
        params_str = json.dumps(params)

        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
        inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))

        with self._shm_leak_detector.Probe():
            with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
                with self.assertRaises(InferenceServerException) as e:
                    # Return value intentionally discarded — the call must raise.
                    client.infer(self._model_name, inputs)

        self.assertIn("[StatusCode.INVALID_ARGUMENT] ", str(e.exception))
        self.assertIn(expected_err_msg, str(e.exception))

    def test_setting_empty_response_parameters(self):
        params = {}
        self._assert_response_parameters_infer_success(params)

    def test_setting_one_element_response_parameters(self):
        params = {"many_elements": False}
        self._assert_response_parameters_infer_success(params)

    def test_setting_three_element_response_parameters(self):
        params = {"bool": True, "str": "Hello World!", "int": 1024}
        self._assert_response_parameters_infer_success(params)

    def test_setting_multi_element_response_parameters(self):
        params = {"a": "1", "b": "2", "c": 3, "d": False, "e": 5, "f": ""}
        self._assert_response_parameters_infer_success(params)

    def test_setting_wrong_type_response_parameters(self):
        # Top-level parameters must be a JSON object, not a list.
        params = []
        expected_err_msg = ", got <class 'list'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_float_response_parameters(self):
        params = {"int": 2, "float": 0.5}
        expected_err_msg = "Expect parameters values to have type bool/int/str, found type <class 'float'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_null_response_parameters(self):
        params = {"bool": True, "null": None}
        expected_err_msg = "Expect parameters values to have type bool/int/str, found type <class 'NoneType'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_nested_response_parameters(self):
        params = {"str": "", "list": ["variable"]}
        expected_err_msg = "Expect parameters values to have type bool/int/str, found type <class 'list'>"
        self._assert_response_parameters_infer_fail(params, expected_err_msg)

    def test_setting_response_parameters_decoupled(self):
        """Each element of the params list produces one streamed response
        whose parameters must match (plus 'triton_final_response')."""
        model_name = "response_parameters_decoupled"
        params = [{"bool": False, "int": 2048}, {"str": "Hello World!"}]
        params_str = json.dumps(params)

        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
        inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))

        responses = []
        with self._shm_leak_detector.Probe():
            with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
                client.start_stream(
                    callback=(lambda result, error: responses.append((result, error)))
                )
                client.async_stream_infer(model_name=model_name, inputs=inputs)
                client.stop_stream()

        self.assertEqual(len(params), len(responses))
        for i in range(len(params)):
            result, error = responses[i]
            self.assertIsNone(error)

            # Since this is a decoupled model, the 'triton_final_response' parameter
            # will be a part of the response parameters, so include it into the expected
            # parameters. The model sends the complete final flag separately from the
            # response, so the parameter is always False.
            expected_params = params[i].copy()
            expected_params["triton_final_response"] = False
            self._assert_response_parameters_match(result, expected_params)

            output = str(result.as_numpy("OUTPUT")[0][0], encoding="utf-8")
            self.assertEqual(json.dumps(params[i]), output)
161+
162+
163+
# Allow running this test file directly: python3 response_parameters_test.py
if __name__ == "__main__":
    unittest.main()
Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
#!/bin/bash
2+
# Copyright 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
3+
#
4+
# Redistribution and use in source and binary forms, with or without
5+
# modification, are permitted provided that the following conditions
6+
# are met:
7+
# * Redistributions of source code must retain the above copyright
8+
# notice, this list of conditions and the following disclaimer.
9+
# * Redistributions in binary form must reproduce the above copyright
10+
# notice, this list of conditions and the following disclaimer in the
11+
# documentation and/or other materials provided with the distribution.
12+
# * Neither the name of NVIDIA CORPORATION nor the names of its
13+
# contributors may be used to endorse or promote products derived
14+
# from this software without specific prior written permission.
15+
#
16+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17+
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19+
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20+
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21+
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22+
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23+
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24+
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27+
28+
# Driver for the response-parameters QA test: sets up the model repository,
# starts the server, runs the pytest suite, and reports pass/fail.
# NOTE(review): run_server, $SERVER, $SERVER_PID come from util.sh; $MODELDIR
# and $BACKEND_DIR are presumably exported by the calling test.sh — confirm.
source ../../common/util.sh

RET=0

#
# Test response parameters
#
# Build a fresh model repository from the shared python_models sources.
rm -rf models && mkdir models
mkdir -p models/response_parameters/1 && \
    cp ../../python_models/response_parameters/model.py models/response_parameters/1 && \
    cp ../../python_models/response_parameters/config.pbtxt models/response_parameters
mkdir -p models/response_parameters_decoupled/1 && \
    cp ../../python_models/response_parameters_decoupled/model.py models/response_parameters_decoupled/1 && \
    cp ../../python_models/response_parameters_decoupled/config.pbtxt models/response_parameters_decoupled

TEST_LOG="response_parameters_test.log"
SERVER_LOG="response_parameters_test.server.log"
SERVER_ARGS="--model-repository=${MODELDIR}/parameters/models --backend-directory=${BACKEND_DIR} --log-verbose=1"

run_server
# run_server sets SERVER_PID to 0 on startup failure.
if [ "$SERVER_PID" == "0" ]; then
    echo -e "\n***\n*** Failed to start $SERVER\n***"
    cat $SERVER_LOG
    exit 1
fi

# Disable errexit so a failing pytest run doesn't abort before cleanup.
set +e
python3 -m pytest --junitxml=response_parameters_test.report.xml response_parameters_test.py > $TEST_LOG 2>&1
if [ $? -ne 0 ]; then
    echo -e "\n***\n*** Response parameters test FAILED\n***"
    cat $TEST_LOG
    RET=1
fi
set -e

kill $SERVER_PID
wait $SERVER_PID

if [ $RET -eq 1 ]; then
    echo -e "\n***\n*** Parameters test FAILED\n***"
else
    echo -e "\n***\n*** Parameters test Passed\n***"
fi
exit $RET

qa/L0_backend_python/test.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/bin/bash
2-
# Copyright 2020-2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2+
# Copyright 2020-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
33
#
44
# Redistribution and use in source and binary forms, with or without
55
# modification, are permitted provided that the following conditions
@@ -457,7 +457,7 @@ if [ "$TEST_JETSON" == "0" ]; then
457457
fi
458458
fi
459459

460-
SUBTESTS="lifecycle argument_validation logging custom_metrics"
460+
SUBTESTS="lifecycle argument_validation logging custom_metrics parameters"
461461
# [DLIS-6124] Disable restart test for Windows since it requires more investigation
462462
# [DLIS-6122] Disable model_control & request_rescheduling tests for Windows since they require load/unload
463463
# [DLIS-6123] Disable examples test for Windows since it requires updates to the example clients
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
# Copyright 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2+
#
3+
# Redistribution and use in source and binary forms, with or without
4+
# modification, are permitted provided that the following conditions
5+
# are met:
6+
# * Redistributions of source code must retain the above copyright
7+
# notice, this list of conditions and the following disclaimer.
8+
# * Redistributions in binary form must reproduce the above copyright
9+
# notice, this list of conditions and the following disclaimer in the
10+
# documentation and/or other materials provided with the distribution.
11+
# * Neither the name of NVIDIA CORPORATION nor the names of its
12+
# contributors may be used to endorse or promote products derived
13+
# from this software without specific prior written permission.
14+
#
15+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
16+
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
18+
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
19+
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
20+
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21+
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22+
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
23+
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26+
27+
# Config for the "response_parameters" Python backend test model.
name: "response_parameters"
backend: "python"
max_batch_size: 8

# Single string element carrying the JSON-serialized parameters the model
# should attach to its response (see model.py in this directory).
input [
  {
    name: "RESPONSE_PARAMETERS"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]

# The model echoes the input JSON string back on this output.
output [
  {
    name: "OUTPUT"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]

instance_group [
  {
    count: 1
    kind: KIND_CPU
  }
]
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
# Copyright 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2+
#
3+
# Redistribution and use in source and binary forms, with or without
4+
# modification, are permitted provided that the following conditions
5+
# are met:
6+
# * Redistributions of source code must retain the above copyright
7+
# notice, this list of conditions and the following disclaimer.
8+
# * Redistributions in binary form must reproduce the above copyright
9+
# notice, this list of conditions and the following disclaimer in the
10+
# documentation and/or other materials provided with the distribution.
11+
# * Neither the name of NVIDIA CORPORATION nor the names of its
12+
# contributors may be used to endorse or promote products derived
13+
# from this software without specific prior written permission.
14+
#
15+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
16+
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
18+
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
19+
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
20+
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21+
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22+
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
23+
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26+
27+
import json
28+
29+
import numpy as np
30+
import triton_python_backend_utils as pb_utils
31+
32+
33+
class TritonPythonModel:
    """Test model: echoes the JSON payload from "RESPONSE_PARAMETERS" on the
    "OUTPUT" tensor and attaches the decoded JSON object as response
    parameters. Any failure (bad JSON, rejected parameter types, round-trip
    mismatch) yields an error response with code INVALID_ARG."""

    def execute(self, requests):
        responses = []

        for request in requests:
            raw = pb_utils.get_input_tensor_by_name(
                request, "RESPONSE_PARAMETERS"
            ).as_numpy()
            payload = str(raw[0][0], encoding="utf-8")
            echo_tensor = pb_utils.Tensor(
                "OUTPUT", np.array([[payload]], dtype=np.object_)
            )
            try:
                requested_params = json.loads(payload)
                response = pb_utils.InferenceResponse(
                    output_tensors=[echo_tensor], parameters=requested_params
                )

                # Read back what was actually stored on the response and make
                # sure it round-trips to the parameters we provided.
                stored_params = (
                    json.loads(response.parameters())
                    if response.parameters() != ""
                    else {}
                )
                if stored_params != requested_params:
                    raise Exception("Response parameters set differ from provided")
            except Exception as e:
                response = pb_utils.InferenceResponse(
                    error=pb_utils.TritonError(
                        message=str(e), code=pb_utils.TritonError.INVALID_ARG
                    )
                )

            responses.append(response)

        return responses

0 commit comments

Comments
 (0)