Skip to content

Commit 237f2e1

Browse files
committed
chore: unify naming of the errors
1 parent 98d843e commit 237f2e1

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

51 files changed

+261
-257
lines changed

docs/docs/03-typescript-api/04-error-handling.md

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,16 @@ title: Error handling
44

55
## Overview
66

7-
In order to handle different types of errors, you can use `instanceof` with our exported class `ExecutorchError` and its `code` property. This allows you to check what exactly went wrong and act accordingly.
7+
In order to handle different types of errors, you can use `instanceof` with our exported class `RnExecutorchError` and its `code` property. This allows you to check what exactly went wrong and act accordingly.
88

99
This example uses the `LLMModule` and then tries to change its `generationConfig`. As the `topp` param has to be a value between 0 and 1 (inclusive), the `.configure()` method will throw an error with the code `InvalidConfig`.
1010

1111
```typescript
1212
import {
1313
LLMModule,
1414
LLAMA3_2_1B_QLORA,
15-
ExecutorchError,
16-
ETErrorCode,
15+
RnExecutorchError,
16+
RnExecutorchErrorCode,
1717
} from 'react-native-executorch';
1818

1919
const llm = new LLMModule({
@@ -28,8 +28,8 @@ try {
2828
await llm.configure({ topp: 1.5 }); // This will throw InvalidConfig error
2929
} catch (err) {
3030
if (
31-
err instanceof ExecutorchError &&
32-
err.code === ETErrorCode.InvalidConfig
31+
err instanceof RnExecutorchError &&
32+
err.code === RnExecutorchErrorCode.InvalidConfig
3333
) {
3434
console.error('Invalid configuration:', err.message);
3535
// Handle the invalid config - set default values
@@ -43,11 +43,11 @@ try {
4343
try {
4444
await llm.sendMessage('Hello, World!');
4545
} catch (err) {
46-
if (err instanceof ExecutorchError) {
47-
if (err.code === ETErrorCode.ModuleNotLoaded) {
46+
if (err instanceof RnExecutorchError) {
47+
if (err.code === RnExecutorchErrorCode.ModuleNotLoaded) {
4848
console.error('Model not loaded:', err.message);
4949
// Load the model first
50-
} else if (err.code === ETErrorCode.ModelGenerating) {
50+
} else if (err.code === RnExecutorchErrorCode.ModelGenerating) {
5151
console.error('Model is already generating:', err.message);
5252
// Wait for current generation to complete
5353
} else {
@@ -68,7 +68,7 @@ llm.delete();
6868

6969
## Reference
7070

71-
All errors in React Native ExecuTorch inherit from `ExecutorchError` and include a `code` property from the `ETErrorCode` enum. Below is a comprehensive list of all possible errors, organized by category.
71+
All errors in React Native ExecuTorch inherit from `RnExecutorchError` and include a `code` property from the `RnExecutorchErrorCode` enum. Below is a comprehensive list of all possible errors, organized by category.
7272

7373
### Module State Errors
7474

packages/react-native-executorch/common/rnexecutorch/Error.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
namespace rnexecutorch {
1111

1212
using ErrorVariant =
13-
std::variant<RnExecutorchInternalError, executorch::runtime::Error>;
13+
std::variant<RnExecutorchErrorCode, executorch::runtime::Error>;
1414

1515
class RnExecutorchError : public std::runtime_error {
1616
public:
@@ -25,11 +25,11 @@ class RnExecutorchError : public std::runtime_error {
2525
errorCode);
2626
}
2727

28-
bool isInternalError() const noexcept {
29-
return std::holds_alternative<RnExecutorchInternalError>(errorCode);
28+
bool isRnExecutorchError() const noexcept {
29+
return std::holds_alternative<RnExecutorchErrorCode>(errorCode);
3030
}
3131

32-
bool isExecuTorchError() const noexcept {
32+
bool isExecuTorchRuntimeError() const noexcept {
3333
return std::holds_alternative<executorch::runtime::Error>(errorCode);
3434
}
3535
};

packages/react-native-executorch/common/rnexecutorch/ErrorCodes.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
namespace rnexecutorch {
99

10-
enum class RnExecutorchInternalError : int32_t {
10+
enum class RnExecutorchErrorCode : int32_t {
1111
/**
1212
* An umbrella-error that is thrown usually when something unexpected happens,
1313
* for example a 3rd-party library error.

packages/react-native-executorch/common/rnexecutorch/TokenizerModule.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ void TokenizerModule::ensureTokenizerLoaded(
1818
const std::string &methodName) const {
1919
if (!tokenizer) {
2020
throw RnExecutorchError(
21-
RnExecutorchInternalError::ModuleNotLoaded,
21+
RnExecutorchErrorCode::ModuleNotLoaded,
2222
methodName + " function was called on an uninitialized tokenizer!");
2323
}
2424
}

packages/react-native-executorch/common/rnexecutorch/data_processing/FileUtils.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ inline std::string getTimeID() {
1818
inline std::string loadBytesFromFile(const std::string &path) {
1919
std::ifstream fs(path, std::ios::in | std::ios::binary);
2020
if (fs.fail()) {
21-
throw RnExecutorchError(RnExecutorchInternalError::FileReadFailed,
21+
throw RnExecutorchError(RnExecutorchErrorCode::FileReadFailed,
2222
"Failed to open tokenizer file!");
2323
}
2424
std::string data;

packages/react-native-executorch/common/rnexecutorch/data_processing/ImageProcessing.cpp

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ std::string saveToTempFile(const cv::Mat &image) {
6868
std::filesystem::path filePath = tempDir / filename;
6969

7070
if (!cv::imwrite(filePath.string(), image)) {
71-
throw RnExecutorchError(RnExecutorchInternalError::FileWriteFailed,
71+
throw RnExecutorchError(RnExecutorchErrorCode::FileWriteFailed,
7272
"Failed to save the image: " + filePath.string());
7373
}
7474

@@ -87,7 +87,7 @@ cv::Mat readImage(const std::string &imageURI) {
8787
++segmentIndex;
8888
}
8989
if (segmentIndex != 1) {
90-
throw RnExecutorchError(RnExecutorchInternalError::FileReadFailed,
90+
throw RnExecutorchError(RnExecutorchErrorCode::FileReadFailed,
9191
"Read image error: invalid base64 URI");
9292
}
9393
auto data = base64_decode(stringData);
@@ -104,12 +104,12 @@ cv::Mat readImage(const std::string &imageURI) {
104104
cv::Mat(1, imageData.size(), CV_8UC1, (void *)imageData.data()),
105105
cv::IMREAD_COLOR);
106106
} else {
107-
throw RnExecutorchError(RnExecutorchInternalError::FileReadFailed,
107+
throw RnExecutorchError(RnExecutorchErrorCode::FileReadFailed,
108108
"Read image error: unknown protocol");
109109
}
110110

111111
if (image.empty()) {
112-
throw RnExecutorchError(RnExecutorchInternalError::FileReadFailed,
112+
throw RnExecutorchError(RnExecutorchErrorCode::FileReadFailed,
113113
"Read image error: invalid argument");
114114
}
115115

@@ -225,7 +225,7 @@ readImageToTensor(const std::string &path,
225225
"Unexpected tensor size, expected at least 2 dimentions "
226226
"but got: %zu.",
227227
tensorDims.size());
228-
throw RnExecutorchError(RnExecutorchInternalError::UnexpectedNumInputs,
228+
throw RnExecutorchError(RnExecutorchErrorCode::UnexpectedNumInputs,
229229
errorMessage);
230230
}
231231
cv::Size tensorSize = cv::Size(tensorDims[tensorDims.size() - 1],

packages/react-native-executorch/common/rnexecutorch/data_processing/Numerical.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ void softmaxWithTemperature(std::span<float> input, float temperature) {
3838

3939
if (temperature <= 0.0F) {
4040
throw RnExecutorchError(
41-
RnExecutorchInternalError::InvalidConfig,
41+
RnExecutorchErrorCode::InvalidConfig,
4242
"Temperature must be greater than 0 for softmax with temperature!");
4343
}
4444

@@ -78,7 +78,7 @@ std::vector<float> meanPooling(std::span<const float> modelOutput,
7878
<< "by the size of attention mask but got size: " << modelOutput.size()
7979
<< " for model output and size: " << attnMask.size()
8080
<< " for attention mask";
81-
throw RnExecutorchError(RnExecutorchInternalError::InvalidConfig, ss.str());
81+
throw RnExecutorchError(RnExecutorchErrorCode::InvalidConfig, ss.str());
8282
}
8383

8484
auto attnMaskLength = attnMask.size();

packages/react-native-executorch/common/rnexecutorch/data_processing/gzip.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ size_t deflateSize(const std::string &input) {
1818
if (::deflateInit2(&strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
1919
MAX_WBITS + kGzipWrapper, kMemLevel,
2020
Z_DEFAULT_STRATEGY) != Z_OK) {
21-
throw RnExecutorchError(RnExecutorchInternalError::UnknownError,
21+
throw RnExecutorchError(RnExecutorchErrorCode::UnknownError,
2222
"deflateInit2 failed");
2323
}
2424

@@ -37,7 +37,7 @@ size_t deflateSize(const std::string &input) {
3737
ret = ::deflate(&strm, strm.avail_in ? Z_NO_FLUSH : Z_FINISH);
3838
if (ret == Z_STREAM_ERROR) {
3939
::deflateEnd(&strm);
40-
throw RnExecutorchError(RnExecutorchInternalError::UnknownError,
40+
throw RnExecutorchError(RnExecutorchErrorCode::UnknownError,
4141
"deflate stream error");
4242
}
4343

packages/react-native-executorch/common/rnexecutorch/models/BaseModel.cpp

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ BaseModel::BaseModel(const std::string &modelSource,
3131
std::vector<int32_t> BaseModel::getInputShape(std::string method_name,
3232
int32_t index) const {
3333
if (!module_) {
34-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
34+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
3535
"Model not loaded: Cannot get input shape");
3636
}
3737

@@ -58,7 +58,7 @@ std::vector<int32_t> BaseModel::getInputShape(std::string method_name,
5858
std::vector<std::vector<int32_t>>
5959
BaseModel::getAllInputShapes(std::string methodName) const {
6060
if (!module_) {
61-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
61+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
6262
"Model not loaded: Cannot get all input shapes");
6363
}
6464

@@ -91,7 +91,7 @@ BaseModel::getAllInputShapes(std::string methodName) const {
9191
std::vector<JSTensorViewOut>
9292
BaseModel::forwardJS(std::vector<JSTensorViewIn> tensorViewVec) const {
9393
if (!module_) {
94-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
94+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
9595
"Model not loaded: Cannot perform forward pass");
9696
}
9797
std::vector<executorch::runtime::EValue> evalues;
@@ -141,7 +141,7 @@ BaseModel::forwardJS(std::vector<JSTensorViewIn> tensorViewVec) const {
141141
Result<executorch::runtime::MethodMeta>
142142
BaseModel::getMethodMeta(const std::string &methodName) const {
143143
if (!module_) {
144-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
144+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
145145
"Model not loaded: Cannot get method meta");
146146
}
147147
return module_->method_meta(methodName);
@@ -150,7 +150,7 @@ BaseModel::getMethodMeta(const std::string &methodName) const {
150150
Result<std::vector<EValue>>
151151
BaseModel::forward(const EValue &input_evalue) const {
152152
if (!module_) {
153-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
153+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
154154
"Model not loaded: Cannot perform forward pass");
155155
}
156156
return module_->forward(input_evalue);
@@ -159,7 +159,7 @@ BaseModel::forward(const EValue &input_evalue) const {
159159
Result<std::vector<EValue>>
160160
BaseModel::forward(const std::vector<EValue> &input_evalues) const {
161161
if (!module_) {
162-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
162+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
163163
"Model not loaded: Cannot perform forward pass");
164164
}
165165
return module_->forward(input_evalues);
@@ -169,7 +169,7 @@ Result<std::vector<EValue>>
169169
BaseModel::execute(const std::string &methodName,
170170
const std::vector<EValue> &input_value) const {
171171
if (!module_) {
172-
throw RnExecutorchError(RnExecutorchInternalError::ModuleNotLoaded,
172+
throw RnExecutorchError(RnExecutorchErrorCode::ModuleNotLoaded,
173173
"Model not loaded, cannot run execute");
174174
}
175175
return module_->execute(methodName, input_value);

packages/react-native-executorch/common/rnexecutorch/models/classification/Classification.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ Classification::Classification(const std::string &modelSource,
1515
: BaseModel(modelSource, callInvoker) {
1616
auto inputShapes = getAllInputShapes();
1717
if (inputShapes.size() == 0) {
18-
throw RnExecutorchError(RnExecutorchInternalError::UnexpectedNumInputs,
18+
throw RnExecutorchError(RnExecutorchErrorCode::UnexpectedNumInputs,
1919
"Model seems to not take any input tensors.");
2020
}
2121
std::vector<int32_t> modelInputShape = inputShapes[0];
@@ -25,7 +25,7 @@ Classification::Classification(const std::string &modelSource,
2525
"Unexpected model input size, expected at least 2 dimentions "
2626
"but got: %zu.",
2727
modelInputShape.size());
28-
throw RnExecutorchError(RnExecutorchInternalError::WrongDimensions,
28+
throw RnExecutorchError(RnExecutorchErrorCode::WrongDimensions,
2929
errorMessage);
3030
}
3131
modelImageSize = cv::Size(modelInputShape[modelInputShape.size() - 1],
@@ -59,7 +59,7 @@ Classification::postprocess(const Tensor &tensor) {
5959
"Unexpected classification output size, was expecting: %zu classes "
6060
"but got: %zu classes",
6161
constants::kImagenet1kV1Labels.size(), resultVec.size());
62-
throw RnExecutorchError(RnExecutorchInternalError::InvalidModelOutput,
62+
throw RnExecutorchError(RnExecutorchErrorCode::InvalidModelOutput,
6363
errorMessage);
6464
}
6565

0 commit comments

Comments
 (0)