

 def model_fn(model_dir):
-    """Placeholder docstring"""
+    """Overrides the default method for loading a model"""
     shared_libs_path = Path(model_dir + "/shared_libs")
 
     if shared_libs_path.exists():
@@ -36,14 +36,12 @@ def model_fn(model_dir):
         if isinstance(obj[0], InferenceSpec):
             inference_spec, schema_builder = obj
 
-    logger.info("in model_fn")
-
     if inference_spec:
         return partial(inference_spec.invoke, model=inference_spec.load(model_dir))
 
 
 def input_fn(input_data, content_type):
-    """Placeholder docstring"""
+    """Deserializes the bytes that were received from the model server"""
     try:
         if hasattr(schema_builder, "custom_input_translator"):
             return schema_builder.custom_input_translator.deserialize(
@@ -59,13 +57,12 @@ def input_fn(input_data, content_type):
 
 
 def predict_fn(input_data, predict_callable):
-    """Placeholder docstring"""
-    logger.info("in predict_fn")
+    """Invokes the model that is taken in by the model server"""
     return predict_callable(input_data)
 
 
 def output_fn(predictions, accept_type):
-    """Placeholder docstring"""
+    """Serializes the prediction to bytes and sends it back to the customer"""
     try:
         if hasattr(schema_builder, "custom_output_translator"):
             return schema_builder.custom_output_translator.serialize(predictions, accept_type)
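For context, here is a minimal, self-contained sketch (not part of this change) of how a model server chains these four handlers for one request: `model_fn` builds a predict callable once at startup, then each request flows through `input_fn` → `predict_fn` → `output_fn`. The `StubInferenceSpec` and `StubSchemaBuilder` classes, the `/opt/ml/model` path, and the stream-based `deserialize` signature are illustrative assumptions, not the real SageMaker objects.

```python
import io
from functools import partial


class StubInferenceSpec:
    """Hypothetical stand-in for the unpickled InferenceSpec object."""

    def load(self, model_dir):
        # Pretend the "model" is a callable that doubles its input.
        return lambda x: x * 2

    def invoke(self, input_object, model):
        return model(input_object)


class StubInputTranslator:
    """Hypothetical custom input translator: bytes stream -> Python object."""

    def deserialize(self, stream, content_type):
        return int(stream.read().decode("utf-8"))


class StubOutputTranslator:
    """Hypothetical custom output translator: prediction -> bytes."""

    def serialize(self, predictions, accept_type):
        return str(predictions).encode("utf-8")


class StubSchemaBuilder:
    """Hypothetical SchemaBuilder carrying the two custom translators."""

    custom_input_translator = StubInputTranslator()
    custom_output_translator = StubOutputTranslator()


def serve_one_request(raw_bytes):
    spec = StubInferenceSpec()
    schema_builder = StubSchemaBuilder()

    # model_fn: bind the loaded model into a single predict callable.
    predict_callable = partial(spec.invoke, model=spec.load("/opt/ml/model"))

    # input_fn: deserialize the request bytes via the custom translator.
    input_data = schema_builder.custom_input_translator.deserialize(
        io.BytesIO(raw_bytes), "application/json"
    )

    # predict_fn: invoke the model on the deserialized input.
    predictions = predict_callable(input_data)

    # output_fn: serialize the prediction back to response bytes.
    return schema_builder.custom_output_translator.serialize(
        predictions, "application/json"
    )


print(serve_one_request(b"21"))  # prints b'42'
```

The `hasattr` checks in the real script make the custom translators optional; when they are absent, the generated script presumably falls back to the SchemaBuilder's default (de)serializers.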