
Commit 8f65350

adopt jacky's suggestion

1 parent 570c6da

6 files changed: +68, -174 lines

qa/L0_backend_python/parameters/response_parameters_test.py

Lines changed: 17 additions & 4 deletions
```diff
@@ -169,15 +169,28 @@ def test_setting_response_parameters_decoupled(self):

     def test_setting_response_parameters_bls_decoupled(self):
         model_name = "response_parameters_bls_decoupled"
-        params = [{"bool": False, "int": 2048}, {"str": "Hello World!"}]
+        params = {"bool": False, "int": 2048, "str": "Hello World!"}
+        params_decoupled = [{}, {"bool": True, "int": 10000}, {"str": "?"}]
         params_str = json.dumps(params)
-
-        inputs = [grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES")]
+        params_decoupled_str = json.dumps(params_decoupled)
+
+        inputs = [
+            grpcclient.InferInput("RESPONSE_PARAMETERS", self._shape, "BYTES"),
+            grpcclient.InferInput(
+                "RESPONSE_PARAMETERS_DECOUPLED", self._shape, "BYTES"
+            ),
+        ]
         inputs[0].set_data_from_numpy(np.array([[params_str]], dtype=np.object_))
+        inputs[1].set_data_from_numpy(
+            np.array([[params_decoupled_str]], dtype=np.object_)
+        )

         with self._shm_leak_detector.Probe() as shm_probe:
             with grpcclient.InferenceServerClient(self._server_address_grpc) as client:
-                client.infer(model_name, inputs)
+                result = client.infer(model_name, inputs)
+
+                output = str(result.as_numpy("OUTPUT")[0][0], encoding="utf-8")
+                self.assertEqual(output, "True")


 if __name__ == "__main__":
```
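For context, the reworked test drives both BLS paths in a single inference: `params` exercises the regular `response_parameters` model and `params_decoupled` (one JSON object per expected decoupled response) exercises `response_parameters_decoupled`, with the BLS model reporting an aggregate pass/fail string in `OUTPUT`. A minimal standalone sketch of the same call, assuming a server at `localhost:8001` and a `[1, 1]` input shape (both assumptions, not taken from the diff):

```python
import json

import numpy as np
import tritonclient.grpc as grpcclient

params = {"bool": False, "int": 2048, "str": "Hello World!"}
params_decoupled = [{}, {"bool": True, "int": 10000}, {"str": "?"}]

# Each parameter set travels as a single JSON-encoded BYTES element.
inputs = [
    grpcclient.InferInput("RESPONSE_PARAMETERS", [1, 1], "BYTES"),
    grpcclient.InferInput("RESPONSE_PARAMETERS_DECOUPLED", [1, 1], "BYTES"),
]
inputs[0].set_data_from_numpy(np.array([[json.dumps(params)]], dtype=np.object_))
inputs[1].set_data_from_numpy(
    np.array([[json.dumps(params_decoupled)]], dtype=np.object_)
)

with grpcclient.InferenceServerClient("localhost:8001") as client:
    result = client.infer("response_parameters_bls_decoupled", inputs)

# The model answers "True" only if both BLS paths echoed their parameters intact.
print(str(result.as_numpy("OUTPUT")[0][0], encoding="utf-8"))
```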

qa/L0_backend_python/parameters/test.sh

Lines changed: 1 addition & 9 deletions
```diff
@@ -47,7 +47,6 @@ mkdir -p models/response_parameters_bls_decoupled/1 && \
     cp ../../python_models/response_parameters_bls_decoupled/config.pbtxt models/response_parameters_bls_decoupled

 TEST_LOG="response_parameters_test.log"
-TEST_BLS_LOG="response_parameters_bls_test.log"
 SERVER_LOG="response_parameters_test.server.log"
 SERVER_ARGS="--model-repository=${MODELDIR}/parameters/models --backend-directory=${BACKEND_DIR} --log-verbose=1"

@@ -59,19 +58,12 @@ if [ "$SERVER_PID" == "0" ]; then
 fi

 set +e
-MODEL_NAME=response_parameters python3 -m pytest --junitxml=response_parameters_test.report.xml response_parameters_test.py > $TEST_LOG 2>&1
+python3 -m pytest --junitxml=response_parameters_test.report.xml response_parameters_test.py > $TEST_LOG 2>&1
 if [ $? -ne 0 ]; then
     echo -e "\n***\n*** Response parameters test FAILED\n***"
     cat $TEST_LOG
     RET=1
 fi
-
-MODEL_NAME=response_parameters_bls python3 -m pytest -s --junitxml=response_parameters_bls_test.report.xml response_parameters_test.py > $TEST_BLS_LOG 2>&1
-if [ $? -ne 0 ]; then
-    echo -e "\n***\n*** Response parameters BLS test FAILED\n***"
-    cat $TEST_BLS_LOG
-    RET=1
-fi
 set -e

 kill $SERVER_PID
```

qa/python_models/response_parameters_bls/config.pbtxt

Lines changed: 5 additions & 0 deletions
```diff
@@ -33,6 +33,11 @@ input [
     name: "RESPONSE_PARAMETERS"
     data_type: TYPE_STRING
     dims: [ 1 ]
+  },
+  {
+    name: "RESPONSE_PARAMETERS_DECOUPLED"
+    data_type: TYPE_STRING
+    dims: [ 1 ]
   }
 ]

```
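Applied on top of the existing config, the model's `input` section should read roughly as follows (reassembled from the hunk's context lines; the opening brace of the first entry is inferred, and the rest of the file is unchanged):

```
input [
  {
    name: "RESPONSE_PARAMETERS"
    data_type: TYPE_STRING
    dims: [ 1 ]
  },
  {
    name: "RESPONSE_PARAMETERS_DECOUPLED"
    data_type: TYPE_STRING
    dims: [ 1 ]
  }
]
```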
qa/python_models/response_parameters_bls/model.py

Lines changed: 45 additions & 24 deletions
```diff
@@ -26,6 +26,7 @@

 import json

+import numpy as np
 import triton_python_backend_utils as pb_utils


@@ -45,37 +46,57 @@ def execute(self, requests):
         responses = []

         for request in requests:
-            bls_input_tensor = pb_utils.get_input_tensor_by_name(
+            passed = True
+
+            # test bls response parameters from a regular model
+            res_params_tensor = pb_utils.get_input_tensor_by_name(
                 request, "RESPONSE_PARAMETERS"
-            )
-            bls_request = pb_utils.InferenceRequest(
+            ).as_numpy()
+            res_params_str = str(res_params_tensor[0][0], encoding="utf-8")
+            res_params = json.loads(res_params_str)
+            bls_input_tensor = pb_utils.Tensor("RESPONSE_PARAMETERS", res_params_tensor)
+            bls_req = pb_utils.InferenceRequest(
                 model_name="response_parameters",
                 inputs=[bls_input_tensor],
-                requested_output_names=["OUTPUT"],
             )
-            try:
-                bls_response = bls_request.exec()
-                response_tensors = bls_response.output_tensors()
-                response_parameters_str = bls_response.parameters()
-                if bls_response.has_error():
-                    raise Exception(bls_response.error().message())
-                res_params = json.loads(response_parameters_str)
-
-                response = pb_utils.InferenceResponse(
-                    output_tensors=response_tensors, parameters=res_params
-                )
+            bls_res = bls_req.exec()  # decoupled=False
+            bls_res_params_str = bls_res.parameters()
+            bls_res_params = (
+                json.loads(bls_res_params_str) if bls_res_params_str != "" else {}
+            )
+            passed = passed and bls_res_params == res_params

-                res_params_set = {}
-                if response.parameters() != "":
-                    res_params_set = json.loads(response.parameters())
-                if res_params_set != res_params:
-                    raise Exception("Response parameters set differ from provided")
-            except Exception as e:
-                error = pb_utils.TritonError(
-                    message=str(e), code=pb_utils.TritonError.INVALID_ARG
+            # test bls response parameters from a decoupled model
+            res_params_decoupled_tensor = pb_utils.get_input_tensor_by_name(
+                request, "RESPONSE_PARAMETERS_DECOUPLED"
+            ).as_numpy()
+            res_params_decoupled_str = str(
+                res_params_decoupled_tensor[0][0], encoding="utf-8"
+            )
+            res_params_decoupled = json.loads(res_params_decoupled_str)
+            bls_decoupled_input_tensor = pb_utils.Tensor(
+                "RESPONSE_PARAMETERS_DECOUPLED", res_params_decoupled_tensor
+            )
+            bls_decoupled_req = pb_utils.InferenceRequest(
+                model_name="response_parameters_decoupled",
+                inputs=[bls_decoupled_input_tensor],
+            )
+            bls_decoupled_res = bls_decoupled_req.exec(decoupled=True)
+            for bls_decoupled_r in bls_decoupled_res:
+                bls_decoupled_r_params_str = bls_decoupled_r.parameters()
+                bls_decoupled_r_params = (
+                    json.loads(bls_decoupled_r_params_str)
+                    if bls_decoupled_r_params_str != ""
+                    else {}
                 )
-                response = pb_utils.InferenceResponse(error=error)
+                passed = passed and bls_decoupled_r_params in res_params_decoupled
+                res_params_decoupled.remove(bls_decoupled_r_params)
+            passed = passed and len(res_params_decoupled) == 0

+            output_tensor = pb_utils.Tensor(
+                "OUTPUT", np.array([[str(passed)]], dtype=np.object_)
+            )
+            response = pb_utils.InferenceResponse(output_tensors=[output_tensor])
             responses.append(response)

         return responses
```
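A decoupled model may emit its responses in any order, so the new loop treats the expected list as a multiset: each received parameter set must appear in `res_params_decoupled`, is removed once matched so duplicates are counted, and the list must be empty at the end. A standalone sketch of the same check (hypothetical helper, not part of the commit; note that in the model's version, `list.remove` would raise `ValueError` on an unexpected parameter set, which would presumably fail the request outright rather than return "False"):

```python
import json


def params_match(expected_sets, received_params_strs):
    """Order-insensitive multiset comparison of per-response parameters.

    expected_sets: list of dicts the decoupled model was asked to attach.
    received_params_strs: JSON strings from response.parameters(), one per
    decoupled response ("" when a response carried no parameters).
    """
    remaining = list(expected_sets)  # copy so the caller's list survives
    for params_str in received_params_strs:
        received = json.loads(params_str) if params_str != "" else {}
        if received not in remaining:
            return False
        remaining.remove(received)  # consume the match so duplicates count
    return len(remaining) == 0  # every expected set must have been produced


# Matches regardless of response order; "" stands for the parameter-less response.
assert params_match(
    [{}, {"bool": True, "int": 10000}, {"str": "?"}],
    ['{"str": "?"}', "", '{"bool": true, "int": 10000}'],
)
```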

qa/python_models/response_parameters_bls_decoupled/config.pbtxt

Lines changed: 0 additions & 52 deletions
This file was deleted.

qa/python_models/response_parameters_bls_decoupled/model.py

Lines changed: 0 additions & 85 deletions
This file was deleted.
