2626import org .tensorflow .lite .task .core .TaskJniUtils .EmptyHandleProvider ;
2727import org .tensorflow .lite .task .core .TaskJniUtils .MultipleBuffersHandleProvider ;
2828
/**
 * Returns the most possible answers on a given question for QA models (BERT, Albert, etc.).
 *
 * <p>The API expects a Bert based TFLite model with metadata containing the following information:
 *
 * <ul>
 *   <li>input_process_units for Wordpiece/Sentencepiece Tokenizer - Wordpiece Tokenizer can be used
 *       for a <a
 *       href="https://tfhub.dev/tensorflow/lite-model/mobilebert/1/default/1">MobileBert</a> model,
 *       Sentencepiece Tokenizer can be used for an <a
 *       href="https://tfhub.dev/tensorflow/lite-model/albert_lite_base/squadv1/1">Albert</a> model.
 *   <li>3 input tensors with names "ids", "mask" and "segment_ids".
 *   <li>2 output tensors with names "end_logits" and "start_logits".
 * </ul>
 */
3044public class BertQuestionAnswerer extends BaseTaskApi implements QuestionAnswerer {
  // Name of the JNI library that hosts the native BertQuestionAnswerer implementation.
  private static final String BERT_QUESTION_ANSWERER_NATIVE_LIBNAME = "task_text_jni";
  // Sentinel values passed to initJniWithFileDescriptor when no explicit descriptor
  // length/offset is supplied; presumably -1 tells the native side to map the whole
  // file from offset 0 — TODO(review): confirm against the native contract.
  private static final int OPTIONAL_FD_LENGTH = -1;
  private static final int OPTIONAL_FD_OFFSET = -1;
3648
  /**
   * Creates a {@link BertQuestionAnswerer} instance from the default {@link
   * BertQuestionAnswererOptions}.
   *
   * @param context android context
   * @param modelPath file path to the model with metadata. Note: The model should not be compressed
   * @return a {@link BertQuestionAnswerer} instance
   * @throws IOException if model file fails to load
   * @throws IllegalArgumentException if an argument is invalid
   * @throws IllegalStateException if there is an internal error
   * @throws RuntimeException if there is an otherwise unspecified error
   */
58- public static BertQuestionAnswerer createFromFile (Context context , String pathToModel )
61+ public static BertQuestionAnswerer createFromFile (Context context , String modelPath )
5962 throws IOException {
6063 return new BertQuestionAnswerer (
6164 TaskJniUtils .createHandleWithMultipleAssetFilesFromLibrary (
@@ -67,97 +70,98 @@ public long createHandle(ByteBuffer... buffers) {
6770 }
6871 },
6972 BERT_QUESTION_ANSWERER_NATIVE_LIBNAME ,
70- pathToModel ));
73+ modelPath ));
7174 }
7275
7376 /**
74- * Generic API to create the QuestionAnswerer for bert models with metadata populated. The API
75- * expects a Bert based TFLite model with metadata containing the following information:
76- *
77- * <ul>
78- * <li>input_process_units for Wordpiece/Sentencepiece Tokenizer - Wordpiece Tokenizer can be
79- * used for a <a
80- * href="https://tfhub.dev/tensorflow/lite-model/mobilebert/1/default/1">MobileBert</a>
81- * model, Sentencepiece Tokenizer Tokenizer can be used for an <a
82- * href="https://tfhub.dev/tensorflow/lite-model/albert_lite_base/squadv1/1">Albert</a>
83- * model.
84- * <li>3 input tensors with names "ids", "mask" and "segment_ids".
85- * <li>2 output tensors with names "end_logits" and "start_logits".
86- * </ul>
77+ * Creates a {@link BertQuestionAnswerer} instance from the default {@link
78+ * BertQuestionAnswererOptions}.
8779 *
88- * @param modelFile {@link File} object of the model
89- * @return {@link BertQuestionAnswerer} instance
90- * @throws IOException If model file fails to load.
80+ * @param modelFile a {@link File} object of the model
81+ * @return a {@link BertQuestionAnswerer} instance
82+ * @throws IOException if model file fails to load
83+ * @throws IllegalArgumentException if an argument is invalid
84+ * @throws IllegalStateException if there is an internal error
85+ * @throws RuntimeException if there is an otherwise unspecified error
9186 */
92- public static BertQuestionAnswerer createFromFile (File modelFile )
93- throws IOException {
87+ public static BertQuestionAnswerer createFromFile (File modelFile ) throws IOException {
9488 try (ParcelFileDescriptor descriptor =
9589 ParcelFileDescriptor .open (modelFile , ParcelFileDescriptor .MODE_READ_ONLY )) {
9690 return new BertQuestionAnswerer (
9791 TaskJniUtils .createHandleFromLibrary (
9892 new EmptyHandleProvider () {
9993 @ Override
10094 public long createHandle () {
101- return initJniWithFileDescriptor (descriptor .getFd ());
95+ return initJniWithFileDescriptor (
96+ /*fileDescriptor=*/ descriptor .getFd (),
97+ /*fileDescriptorLength=*/ OPTIONAL_FD_LENGTH ,
98+ /*fileDescriptorOffset=*/ OPTIONAL_FD_OFFSET );
10299 }
103100 },
104101 BERT_QUESTION_ANSWERER_NATIVE_LIBNAME ));
105102 }
106103 }
107104
108105 /**
109- * Creates the API instance with a bert model and vocabulary file.
106+ * Creates a {@link BertQuestionAnswerer} instance with a Bert model and a vocabulary file.
110107 *
111108 * <p>One suitable model is: https://tfhub.dev/tensorflow/lite-model/mobilebert/1/default/1
112109 *
113110 * @param context android context
114- * @param pathToModel file path to the bert model. Note: The model should not be compressed
115- * @param pathToVocab file path to the vocabulary file. Note: The file should not be compressed
116- * @return {@link BertQuestionAnswerer} instance
117- * @throws IOException If model file fails to load.
111+ * @param modelPath file path to the Bert model. Note: The model should not be compressed
112+ * @param vocabPath file path to the vocabulary file. Note: The file should not be compressed
113+ * @return a {@link BertQuestionAnswerer} instance
114+ * @throws IOException If model file fails to load
115+ * @throws IllegalArgumentException if an argument is invalid
116+ * @throws IllegalStateException if there is an internal error
117+ * @throws RuntimeException if there is an otherwise unspecified error
118118 */
119119 public static BertQuestionAnswerer createBertQuestionAnswererFromFile (
120- Context context , String pathToModel , String pathToVocab ) throws IOException {
120+ Context context , String modelPath , String vocabPath ) throws IOException {
121121 return new BertQuestionAnswerer (
122122 TaskJniUtils .createHandleWithMultipleAssetFilesFromLibrary (
123123 context ,
124124 new MultipleBuffersHandleProvider () {
125125 @ Override
126126 public long createHandle (ByteBuffer ... buffers ) {
127- return BertQuestionAnswerer . initJniWithBertByteBuffers (buffers );
127+ return initJniWithBertByteBuffers (buffers );
128128 }
129129 },
130130 BERT_QUESTION_ANSWERER_NATIVE_LIBNAME ,
131- pathToModel ,
132- pathToVocab ));
131+ modelPath ,
132+ vocabPath ));
133133 }
134134
135135 /**
136- * Creates the API instance with an albert model and sentence piece model file.
136+ * Creates a {@link BertQuestionAnswerer} instance with an Albert model and a sentence piece model
137+ * file.
137138 *
138139 * <p>One suitable model is: https://tfhub.dev/tensorflow/lite-model/albert_lite_base/squadv1/1
139140 *
140141 * @param context android context
141- * @param pathToModel file path to the albert model. Note: The model should not be compressed
142- * @param pathToSentencePieceModel file path to the sentence piece model file. Note: The model
142+ * @param modelPath file path to the Albert model. Note: The model should not be compressed
143+ * @param sentencePieceModelPath file path to the sentence piece model file. Note: The model
143144 * should not be compressed
144- * @return {@link BertQuestionAnswerer} instance
145- * @throws IOException If model file fails to load.
145+ * @return a {@link BertQuestionAnswerer} instance
146+ * @throws IOException If model file fails to load
147+ * @throws IllegalArgumentException if an argument is invalid
148+ * @throws IllegalStateException if there is an internal error
149+ * @throws RuntimeException if there is an otherwise unspecified error
146150 */
147151 public static BertQuestionAnswerer createAlbertQuestionAnswererFromFile (
148- Context context , String pathToModel , String pathToSentencePieceModel ) throws IOException {
152+ Context context , String modelPath , String sentencePieceModelPath ) throws IOException {
149153 return new BertQuestionAnswerer (
150154 TaskJniUtils .createHandleWithMultipleAssetFilesFromLibrary (
151155 context ,
152156 new MultipleBuffersHandleProvider () {
153157 @ Override
154158 public long createHandle (ByteBuffer ... buffers ) {
155- return BertQuestionAnswerer . initJniWithAlbertByteBuffers (buffers );
159+ return initJniWithAlbertByteBuffers (buffers );
156160 }
157161 },
158162 BERT_QUESTION_ANSWERER_NATIVE_LIBNAME ,
159- pathToModel ,
160- pathToSentencePieceModel ));
163+ modelPath ,
164+ sentencePieceModelPath ));
161165 }
162166
  /**
   * Answers the question for the given context passage by delegating to the native model.
   *
   * <p>NOTE(review): the signature line here is reconstructed from the diff's hunk-header
   * context — verify it matches the {@code QuestionAnswerer} interface.
   */
  @Override
  public List<QaAnswer> answer(String context, String question) {
    return answerNative(getNativeHandle(), context, question);
  }
168172
  /**
   * Wraps a handle to the native BertQuestionAnswerer; instances are obtained only through
   * the static factory methods above.
   */
  private BertQuestionAnswerer(long nativeHandle) {
    super(nativeHandle);
  }
176+
  // Initializes the native side from raw buffers:
  // modelBuffers[0] is tflite model file buffer, and modelBuffers[1] is vocab file buffer.
  private static native long initJniWithBertByteBuffers(ByteBuffer... modelBuffers);
171179
@@ -176,7 +184,8 @@ public List<QaAnswer> answer(String context, String question) {
  // modelBuffers[0] is tflite model file buffer with metadata to specify which tokenizer to use.
  private static native long initJniWithModelWithMetadataByteBuffers(ByteBuffer... modelBuffers);

  // Initializes the native side from an already-open model file descriptor.
  // fileDescriptorLength/fileDescriptorOffset may be the OPTIONAL_FD_* sentinels (-1)
  // when unspecified — TODO(review): confirm the native layer's handling of -1.
  private static native long initJniWithFileDescriptor(
      int fileDescriptor, long fileDescriptorLength, long fileDescriptorOffset);

  // Runs native inference: returns candidate answers to `question` found in `context`.
  private static native List<QaAnswer> answerNative(
      long nativeHandle, String context, String question);
0 commit comments