4 files changed: +5 -8 lines, all under src/python/library/tritonclient.

@@ -144,6 +144,7 @@ def set_shape(self, shape):
144144 """
145145 self ._input .ClearField ("shape" )
146146 self ._input .shape .extend (shape )
147+ self .validate_data ()
147148 return self
148149
149150 def set_data_from_numpy (self , input_tensor ):
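With validate_data() now called inside set_shape(), a shape/data mismatch is reported as soon as the shape is changed rather than later, when the inference request is assembled. A minimal sketch of the intended behavior on the gRPC side, assuming the public tritonclient.grpc API and that validate_data() raises tritonclient.utils.InferenceServerException on a mismatch:

    import numpy as np
    import tritonclient.grpc as grpcclient
    from tritonclient.utils import InferenceServerException

    # Input declared as 1x4 FP32 and populated with exactly 4 values.
    inp = grpcclient.InferInput("INPUT0", [1, 4], "FP32")
    inp.set_data_from_numpy(np.zeros((1, 4), dtype=np.float32))

    try:
        # The new shape implies 8 elements while only 4 were attached, so the
        # check now fails here instead of inside infer().
        inp.set_shape([1, 8])
    except InferenceServerException as err:  # assumed error type, see above
        print("rejected at set_shape():", err)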
 #!/usr/bin/env python3

-# Copyright 2023-2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# Copyright 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -96,7 +96,6 @@ def _get_inference_request(
     if request_id != "":
         request.id = request_id
     for infer_input in inputs:
-        infer_input.validate_data()
         request.inputs.extend([infer_input._get_tensor()])
         if infer_input._get_content() is not None:
             request.raw_input_contents.extend([infer_input._get_content()])
@@ -145,6 +145,7 @@ def set_shape(self, shape):
         The updated input
         """
         self._shape = shape
+        self.validate_data()
         return self

     def set_data_from_numpy(self, input_tensor, binary_data=True):
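The HTTP InferInput gets the same eager check: set_shape() validates the attached data against the new shape before returning. A hedged end-to-end sketch; the server address, model name, and tensor names are placeholders:

    import numpy as np
    import tritonclient.http as httpclient

    client = httpclient.InferenceServerClient("localhost:8000")

    inp = httpclient.InferInput("INPUT0", [1, 4], "FP32")
    inp.set_data_from_numpy(np.ones((1, 4), dtype=np.float32), binary_data=True)
    # The shape/data consistency check now runs here, when the shape is
    # (re)set, rather than in _get_inference_request() at infer() time.
    inp.set_shape([1, 4])

    out = httpclient.InferRequestedOutput("OUTPUT0", binary_data=True)
    result = client.infer("example_model", inputs=[inp], outputs=[out])
    print(result.as_numpy("OUTPUT0"))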
 #!/usr/bin/env python3

-# Copyright 2023-2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# Copyright 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -106,11 +106,7 @@ def _get_inference_request(
     if timeout is not None:
         parameters["timeout"] = timeout

-    infer_request["inputs"] = []
-    for infer_input in inputs:
-        infer_input.validate_data()
-        infer_request["inputs"].append(infer_input._get_tensor())
-
+    infer_request["inputs"] = [this_input._get_tensor() for this_input in inputs]
     if outputs:
         infer_request["outputs"] = [
             this_output._get_tensor() for this_output in outputs
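With the per-input loop gone, the HTTP path now builds the "inputs" list with a comprehension that mirrors the "outputs" handling just below it; each element is whatever _get_tensor() returns for the input. As an illustrative sketch only, the assembled body follows the KServe v2 inference protocol, roughly like the dict below (field values are made up; with binary_data=True the "data" field is replaced by raw bytes appended after the JSON):

    # Illustrative only: approximate request body produced by
    # _get_inference_request() for one FP32 input and one requested output.
    infer_request = {
        "inputs": [
            {
                "name": "INPUT0",
                "shape": [1, 4],
                "datatype": "FP32",
                "data": [0.0, 1.0, 2.0, 3.0],
            }
        ],
        "outputs": [
            {"name": "OUTPUT0"}
        ],
    }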