@@ -171,8 +171,9 @@ TensorflowPlugin::TensorflowPlugin(TfLiteInterpreter* interpreter, Buffer model,
   TfLiteStatus status = TfLiteInterpreterAllocateTensors(_interpreter);
   if (status != kTfLiteOk) {
     [[unlikely]];
-    throw std::runtime_error("Failed to allocate memory for input/output tensors! Status: " +
-                             tfLiteStatusToString(status));
+    throw std::runtime_error(
+        "TFLite: Failed to allocate memory for input/output tensors! Status: " +
+        tfLiteStatusToString(status));
   }
 
   log("Successfully created Tensorflow Plugin!");
@@ -205,23 +206,33 @@ void TensorflowPlugin::copyInputBuffers(jsi::Runtime& runtime, jsi::Object inputValues) {
 #if DEBUG
   if (!inputValues.isArray(runtime)) {
     [[unlikely]];
-    throw std::runtime_error(
-        "TFLite: Input Values must be an array, one item for each input tensor!");
+    throw jsi::JSError(runtime,
+                       "TFLite: Input Values must be an array, one item for each input tensor!");
   }
 #endif
 
   jsi::Array array = inputValues.asArray(runtime);
   size_t count = array.size(runtime);
   if (count != TfLiteInterpreterGetInputTensorCount(_interpreter)) {
     [[unlikely]];
-    throw std::runtime_error(
-        "TFLite: Input Values have different size than there are input tensors!");
+    throw jsi::JSError(runtime,
+                       "TFLite: Input Values have different size than there are input tensors!");
   }
 
   for (size_t i = 0; i < count; i++) {
     TfLiteTensor* tensor = TfLiteInterpreterGetInputTensor(_interpreter, i);
-    jsi::Value value = array.getValueAtIndex(runtime, i);
-    TypedArrayBase inputBuffer = getTypedArray(runtime, value.asObject(runtime));
+    jsi::Object object = array.getValueAtIndex(runtime, i).asObject(runtime);
+
+#if DEBUG
+    if (!isTypedArray(runtime, object)) {
+      [[unlikely]];
+      throw jsi::JSError(
+          runtime,
+          "TFLite: Input value is not a TypedArray! (Uint8Array, Uint16Array, Float32Array, etc.)");
+    }
+#endif
+
+    TypedArrayBase inputBuffer = getTypedArray(runtime, std::move(object));
     TensorHelpers::updateTensorFromJSBuffer(runtime, tensor, inputBuffer);
   }
 }
@@ -244,7 +255,8 @@ void TensorflowPlugin::run() {
   TfLiteStatus status = TfLiteInterpreterInvoke(_interpreter);
   if (status != kTfLiteOk) {
     [[unlikely]];
-    throw std::runtime_error("Failed to run TFLite Model! Status: " + tfLiteStatusToString(status));
+    throw std::runtime_error("TFLite: Failed to run TFLite Model! Status: " +
+                             tfLiteStatusToString(status));
   }
 }
 
@@ -296,7 +308,8 @@ jsi::Value TensorflowPlugin::get(jsi::Runtime& runtime, const jsi::PropNameID& propNameId) {
       TfLiteTensor* tensor = TfLiteInterpreterGetInputTensor(_interpreter, i);
       if (tensor == nullptr) {
         [[unlikely]];
-        throw jsi::JSError(runtime, "Failed to get input tensor " + std::to_string(i) + "!");
+        throw jsi::JSError(runtime,
+                           "TFLite: Failed to get input tensor " + std::to_string(i) + "!");
       }
 
       jsi::Object object = TensorHelpers::tensorToJSObject(runtime, tensor);
@@ -310,7 +323,8 @@ jsi::Value TensorflowPlugin::get(jsi::Runtime& runtime, const jsi::PropNameID& propNameId) {
       const TfLiteTensor* tensor = TfLiteInterpreterGetOutputTensor(_interpreter, i);
       if (tensor == nullptr) {
         [[unlikely]];
-        throw jsi::JSError(runtime, "Failed to get output tensor " + std::to_string(i) + "!");
+        throw jsi::JSError(runtime,
+                           "TFLite: Failed to get output tensor " + std::to_string(i) + "!");
       }
 
       jsi::Object object = TensorHelpers::tensorToJSObject(runtime, tensor);
0 commit comments