@@ -172,8 +172,6 @@ def run_locally(model_path, port, mode, keep_env, keep_image):
 @click.option('--file_path', required=False, help='File path of file for the model to predict')
 @click.option('--url', required=False, help='URL to the file for the model to predict')
 @click.option('--bytes', required=False, help='Bytes to the file for the model to predict')
-@click.option(
-    '--input_id', required=False, help='Existing input id in the app for the model to predict')
 @click.option('--input_type', required=False, help='Type of input')
 @click.option(
     '-cc_id',
@@ -187,36 +185,28 @@ def run_locally(model_path, port, mode, keep_env, keep_image):
     '--inference_params', required=False, default='{}', help='Inference parameters to override')
 @click.option('--output_config', required=False, default='{}', help='Output config to override')
 @click.pass_context
-def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, bytes, input_id,
-            input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params,
-            output_config):
+def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, bytes, input_type,
+            compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config):
   """Predict using the given model"""
   import json

-  from clarifai.client.deployment import Deployment
-  from clarifai.client.input import Input
   from clarifai.client.model import Model
-  from clarifai.client.nodepool import Nodepool
   from clarifai.utils.cli import from_yaml
   if config:
     config = from_yaml(config)
-    model_id, user_id, app_id, model_url, file_path, url, bytes, input_id, input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config = (
+    model_id, user_id, app_id, model_url, file_path, url, bytes, input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config = (
         config.get(k, v)
         for k, v in [('model_id', model_id), ('user_id', user_id), ('app_id', app_id), (
             'model_url', model_url), ('file_path', file_path), ('url', url), ('bytes', bytes), (
-                'input_id',
-                input_id), ('input_type',
-                            input_type), ('compute_cluster_id',
-                                          compute_cluster_id), ('nodepool_id', nodepool_id), (
-                                              'deployment_id',
-                                              deployment_id), ('inference_params',
-                                                               inference_params), ('output_config',
-                                                                                   output_config)])
+                'input_type', input_type), ('compute_cluster_id', compute_cluster_id), (
+                    'nodepool_id',
+                    nodepool_id), ('deployment_id',
+                                   deployment_id), ('inference_params',
+                                                    inference_params), ('output_config',
+                                                                        output_config)])
   if sum([opt[1] for opt in [(model_id, 1), (user_id, 1), (app_id, 1), (model_url, 3)]
           if opt[0]]) != 3:
     raise ValueError("Either --model_id & --user_id & --app_id or --model_url must be provided.")
-  if sum([1 for opt in [file_path, url, bytes, input_id] if opt]) != 1:
-    raise ValueError("Exactly one of --file_path, --url, --bytes or --input_id must be provided.")
   if compute_cluster_id or nodepool_id or deployment_id:
     if sum([
         opt[1] for opt in [(compute_cluster_id, 0.5), (nodepool_id, 0.5), (deployment_id, 1)]
@@ -266,21 +256,5 @@ def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, b
         nodepool_id=nodepool_id,
         deployment_id=deployment_id,
         inference_params=inference_params,
-        output_config=output_config)
-  elif input_id:
-    inputs = [Input.get_input(input_id)]
-    runner_selector = None
-    if deployment_id:
-      runner_selector = Deployment.get_runner_selector(
-          user_id=ctx.obj['user_id'], deployment_id=deployment_id)
-    elif compute_cluster_id and nodepool_id:
-      runner_selector = Nodepool.get_runner_selector(
-          user_id=ctx.obj['user_id'],
-          compute_cluster_id=compute_cluster_id,
-          nodepool_id=nodepool_id)
-    model_prediction = model.predict(
-        inputs=inputs,
-        runner_selector=runner_selector,
-        inference_params=inference_params,
-        output_config=output_config)
+        output_config=output_config)  ## TO DO: Add support for input_id
   click.echo(model_prediction)
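The weighted-sum option checks kept by this diff are terse, so here is a minimal, self-contained sketch of the same validation pattern. It is illustrative only and not part of the Clarifai CLI: the helper names and placeholder values are hypothetical, and the threshold and error message for the compute-target check are assumptions, since that part of the hunk is cut off.

```python
# Sketch of the weighted-sum option validation used in the predict command above
# (hypothetical helpers, not Clarifai CLI code).


def validate_model_source(model_id=None, user_id=None, app_id=None, model_url=None):
  # --model_id/--user_id/--app_id weigh 1 each and --model_url weighs 3, so the weights of the
  # provided options sum to exactly 3 only for "all three ids" or "just the URL".
  weights = [(model_id, 1), (user_id, 1), (app_id, 1), (model_url, 3)]
  if sum(weight for value, weight in weights if value) != 3:
    raise ValueError("Either --model_id & --user_id & --app_id or --model_url must be provided.")


def validate_compute_target(compute_cluster_id=None, nodepool_id=None, deployment_id=None):
  # Same idea for the compute target: a compute_cluster_id/nodepool_id pair (0.5 + 0.5) or a
  # deployment_id (1) should sum to 1. The threshold and message here are assumed, as the
  # corresponding lines fall outside the visible hunk.
  weights = [(compute_cluster_id, 0.5), (nodepool_id, 0.5), (deployment_id, 1)]
  if sum(weight for value, weight in weights if value) != 1:
    raise ValueError(
        "Either --compute_cluster_id & --nodepool_id or --deployment_id must be provided.")


# Placeholder ids, for illustration only.
validate_model_source(model_id="my-model", user_id="my-user", app_id="my-app")
validate_compute_target(deployment_id="my-deployment")
```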