Commit 0961bb8

Add in-memory log buffer in Android JNI
1 parent 97a4600 commit 0961bb8

4 files changed: +227 -120 lines changed

extension/android/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
@@ -190,4 +190,4 @@ target_include_directories(
 
 target_compile_options(executorch_jni PUBLIC ${_common_compile_options})
 
-target_link_libraries(executorch_jni ${link_libraries})
+target_link_libraries(executorch_jni ${link_libraries} log)

extension/android/jni/jni_layer.cpp

Lines changed: 78 additions & 3 deletions
@@ -33,8 +33,45 @@
 #include <fbjni/ByteBuffer.h>
 #include <fbjni/fbjni.h>
 
+using namespace executorch::extension;
+using namespace torch::executor;
+
 #ifdef __ANDROID__
 #include <android/log.h>
+#include <mutex>
+#include <sstream>
+
+// Number of entries to store in the in-memory log buffer.
+const size_t log_buffer_length = 16;
+
+struct log_entry {
+  et_timestamp_t timestamp;
+  et_pal_log_level_t level;
+  std::string filename;
+  std::string function;
+  size_t line;
+  std::string message;
+
+  log_entry(
+      et_timestamp_t timestamp,
+      et_pal_log_level_t level,
+      const char* filename,
+      const char* function,
+      size_t line,
+      const char* message,
+      size_t length)
+      : timestamp(timestamp),
+        level(level),
+        filename(filename),
+        function(function),
+        line(line),
+        message(message, length) {}
+};
+
+namespace {
+std::vector<log_entry> log_buffer_;
+std::mutex log_buffer_mutex_;
+} // namespace
 
 // For Android, write to logcat
 void et_pal_emit_log_message(
@@ -45,6 +82,15 @@ void et_pal_emit_log_message(
     size_t line,
     const char* message,
     size_t length) {
+  std::lock_guard<std::mutex> guard(log_buffer_mutex_);
+
+  while (log_buffer_.size() >= log_buffer_length) {
+    log_buffer_.erase(log_buffer_.begin());
+  }
+
+  log_buffer_.emplace_back(
+      timestamp, level, filename, function, line, message, length);
+
   int android_log_level = ANDROID_LOG_UNKNOWN;
   if (level == 'D') {
     android_log_level = ANDROID_LOG_DEBUG;
@@ -60,9 +106,6 @@ void et_pal_emit_log_message(
 }
 #endif
 
-using namespace executorch::extension;
-using namespace torch::executor;
-
 namespace executorch::extension {
 class TensorHybrid : public facebook::jni::HybridClass<TensorHybrid> {
  public:
@@ -391,12 +434,44 @@ class ExecuTorchJni : public facebook::jni::HybridClass<ExecuTorchJni> {
     return jresult;
   }
 
+  facebook::jni::local_ref<facebook::jni::JArrayClass<jstring>>
+  readLogBuffer() {
+#ifdef __ANDROID__
+    std::lock_guard<std::mutex> guard(log_buffer_mutex_);
+
+    const auto size = log_buffer_.size();
+    facebook::jni::local_ref<facebook::jni::JArrayClass<jstring>> ret =
+        facebook::jni::JArrayClass<jstring>::newArray(size);
+
+    for (auto i = 0u; i < size; i++) {
+      const auto& entry = log_buffer_[i];
+      // Format the log entry as "[TIMESTAMP FUNCTION FILE:LINE] LEVEL MESSAGE".
+      std::stringstream ss;
+      ss << "[" << entry.timestamp << " " << entry.function << " "
+         << entry.filename << ":" << entry.line << "] "
+         << static_cast<char>(entry.level) << " " << entry.message;
+
+      facebook::jni::local_ref<facebook::jni::JString> jstr_message =
+          facebook::jni::make_jstring(ss.str().c_str());
+      (*ret)[i] = jstr_message;
+    }
+
+    return ret;
+#else
+    return facebook::jni::JArrayClass<String>::newArray(0);
+#endif
+  }
+
   static void registerNatives() {
     registerHybrid({
         makeNativeMethod("initHybrid", ExecuTorchJni::initHybrid),
         makeNativeMethod("forward", ExecuTorchJni::forward),
         makeNativeMethod("execute", ExecuTorchJni::execute),
         makeNativeMethod("loadMethod", ExecuTorchJni::load_method),
+
+#ifdef __ANDROID__
+        makeNativeMethod("readLogBuffer", ExecuTorchJni::readLogBuffer),
+#endif
     });
   }
 };
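The buffering policy added above is a fixed-capacity, drop-oldest queue guarded by a mutex, with each entry rendered as "[TIMESTAMP FUNCTION FILE:LINE] LEVEL MESSAGE" when read back. For reference, here is a minimal standalone Java sketch of the same policy; it is illustrative only, the class and method names are invented and are not part of this commit:

import java.util.ArrayDeque;

// Illustrative analogue of the JNI-side buffer (hypothetical class, not in the commit):
// keep only the most recent LOG_BUFFER_LENGTH entries, dropping the oldest once full.
final class BoundedLogBuffer {
  // Mirrors log_buffer_length = 16 in jni_layer.cpp.
  private static final int LOG_BUFFER_LENGTH = 16;

  private final ArrayDeque<String> entries = new ArrayDeque<>();

  // Format and store one entry; synchronized plays the role of log_buffer_mutex_.
  synchronized void add(
      long timestamp, char level, String file, String function, int line, String message) {
    while (entries.size() >= LOG_BUFFER_LENGTH) {
      entries.removeFirst(); // evict the oldest entry
    }
    // Same "[TIMESTAMP FUNCTION FILE:LINE] LEVEL MESSAGE" shape produced by readLogBuffer().
    entries.addLast(
        "[" + timestamp + " " + function + " " + file + ":" + line + "] " + level + " " + message);
  }

  // Snapshot of the buffered entries, oldest first.
  synchronized String[] snapshot() {
    return entries.toArray(new String[0]);
  }
}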

extension/android/src/main/java/org/pytorch/executorch/Module.java

Lines changed: 100 additions & 82 deletions
@@ -10,6 +10,7 @@
 
 import com.facebook.soloader.nativeloader.NativeLoader;
 import com.facebook.soloader.nativeloader.SystemDelegate;
+
 import org.pytorch.executorch.annotations.Experimental;
 
 /**
@@ -20,92 +21,109 @@
 @Experimental
 public class Module {
 
-  /** Load mode for the module. Load the whole file as a buffer. */
-  public static final int LOAD_MODE_FILE = 0;
+  /**
+   * Load mode for the module. Load the whole file as a buffer.
+   */
+  public static final int LOAD_MODE_FILE = 0;
+
+  /**
+   * Load mode for the module. Use mmap to load pages into memory.
+   */
+  public static final int LOAD_MODE_MMAP = 1;
+
+  /**
+   * Load mode for the module. Use memory locking and handle errors.
+   */
+  public static final int LOAD_MODE_MMAP_USE_MLOCK = 2;
+
+  /**
+   * Load mode for the module. Use memory locking and ignore errors.
+   */
+  public static final int LOAD_MODE_MMAP_USE_MLOCK_IGNORE_ERRORS = 3;
 
-  /** Load mode for the module. Use mmap to load pages into memory. */
-  public static final int LOAD_MODE_MMAP = 1;
+  /**
+   * Reference to the NativePeer object of this module.
+   */
+  private NativePeer mNativePeer;
 
-  /** Load mode for the module. Use memory locking and handle errors. */
-  public static final int LOAD_MODE_MMAP_USE_MLOCK = 2;
+  /**
+   * Loads a serialized ExecuTorch module from the specified path on the disk.
+   *
+   * @param modelPath path to file that contains the serialized ExecuTorch module.
+   * @param loadMode load mode for the module. See constants in {@link Module}.
+   * @return new {@link org.pytorch.executorch.Module} object which owns the model module.
+   */
+  public static Module load(final String modelPath, int loadMode) {
+    if (!NativeLoader.isInitialized()) {
+      NativeLoader.init(new SystemDelegate());
+    }
+    return new Module(new NativePeer(modelPath, loadMode));
+  }
+
+  /**
+   * Loads a serialized ExecuTorch module from the specified path on the disk to run on CPU.
+   *
+   * @param modelPath path to file that contains the serialized ExecuTorch module.
+   * @return new {@link org.pytorch.executorch.Module} object which owns the model module.
+   */
+  public static Module load(final String modelPath) {
+    return load(modelPath, LOAD_MODE_FILE);
+  }
 
-  /** Load mode for the module. Use memory locking and ignore errors. */
-  public static final int LOAD_MODE_MMAP_USE_MLOCK_IGNORE_ERRORS = 3;
+  Module(NativePeer nativePeer) {
+    this.mNativePeer = nativePeer;
+  }
 
-  /** Reference to the NativePeer object of this module. */
-  private NativePeer mNativePeer;
+  /**
+   * Runs the 'forward' method of this module with the specified arguments.
+   *
+   * @param inputs arguments for the ExecuTorch module's 'forward' method. Note: if method 'forward'
+   *     requires inputs but no inputs are given, the function will not error out, but run 'forward'
+   *     with sample inputs.
+   * @return return value from the 'forward' method.
+   */
+  public EValue[] forward(EValue... inputs) {
+    return mNativePeer.forward(inputs);
+  }
+
+  /**
+   * Runs the specified method of this module with the specified arguments.
+   *
+   * @param methodName name of the ExecuTorch method to run.
+   * @param inputs arguments that will be passed to ExecuTorch method.
+   * @return return value from the method.
+   */
+  public EValue[] execute(String methodName, EValue... inputs) {
+    return mNativePeer.execute(methodName, inputs);
+  }
+
+  /**
+   * Load a method on this module. This might help with the first time inference performance,
+   * because otherwise the method is loaded lazily when it's execute. Note: this function is
+   * synchronous, and will block until the method is loaded. Therefore, it is recommended to call
+   * this on a background thread. However, users need to make sure that they don't execute before
+   * this function returns.
+   *
+   * @return the Error code if there was an error loading the method
+   */
+  public int loadMethod(String methodName) {
+    return mNativePeer.loadMethod(methodName);
+  }
+
+  /**
+   * Retrieve the in-memory log buffer, containing the most recent ExecuTorch log entries.
+   */
+  public String[] readLogBuffer() {
+    return mNativePeer.readLogBuffer();
+  }
 
-  /**
-   * Loads a serialized ExecuTorch module from the specified path on the disk.
-   *
-   * @param modelPath path to file that contains the serialized ExecuTorch module.
-   * @param loadMode load mode for the module. See constants in {@link Module}.
-   * @return new {@link org.pytorch.executorch.Module} object which owns the model module.
-   */
-  public static Module load(final String modelPath, int loadMode) {
-    if (!NativeLoader.isInitialized()) {
-      NativeLoader.init(new SystemDelegate());
+  /**
+   * Explicitly destroys the native torch::jit::Module. Calling this method is not required, as the
+   * native object will be destroyed when this object is garbage-collected. However, the timing of
+   * garbage collection is not guaranteed, so proactively calling {@code destroy} can free memory
+   * more quickly. See {@link com.facebook.jni.HybridData#resetNative}.
+   */
+  public void destroy() {
+    mNativePeer.resetNative();
   }
-    return new Module(new NativePeer(modelPath, loadMode));
-  }
-
-  /**
-   * Loads a serialized ExecuTorch module from the specified path on the disk to run on CPU.
-   *
-   * @param modelPath path to file that contains the serialized ExecuTorch module.
-   * @return new {@link org.pytorch.executorch.Module} object which owns the model module.
-   */
-  public static Module load(final String modelPath) {
-    return load(modelPath, LOAD_MODE_FILE);
-  }
-
-  Module(NativePeer nativePeer) {
-    this.mNativePeer = nativePeer;
-  }
-
-  /**
-   * Runs the 'forward' method of this module with the specified arguments.
-   *
-   * @param inputs arguments for the ExecuTorch module's 'forward' method. Note: if method 'forward'
-   *     requires inputs but no inputs are given, the function will not error out, but run 'forward'
-   *     with sample inputs.
-   * @return return value from the 'forward' method.
-   */
-  public EValue[] forward(EValue... inputs) {
-    return mNativePeer.forward(inputs);
-  }
-
-  /**
-   * Runs the specified method of this module with the specified arguments.
-   *
-   * @param methodName name of the ExecuTorch method to run.
-   * @param inputs arguments that will be passed to ExecuTorch method.
-   * @return return value from the method.
-   */
-  public EValue[] execute(String methodName, EValue... inputs) {
-    return mNativePeer.execute(methodName, inputs);
-  }
-
-  /**
-   * Load a method on this module. This might help with the first time inference performance,
-   * because otherwise the method is loaded lazily when it's execute. Note: this function is
-   * synchronous, and will block until the method is loaded. Therefore, it is recommended to call
-   * this on a background thread. However, users need to make sure that they don't execute before
-   * this function returns.
-   *
-   * @return the Error code if there was an error loading the method
-   */
-  public int loadMethod(String methodName) {
-    return mNativePeer.loadMethod(methodName);
-  }
-
-  /**
-   * Explicitly destroys the native torch::jit::Module. Calling this method is not required, as the
-   * native object will be destroyed when this object is garbage-collected. However, the timing of
-   * garbage collection is not guaranteed, so proactively calling {@code destroy} can free memory
-   * more quickly. See {@link com.facebook.jni.HybridData#resetNative}.
-   */
-  public void destroy() {
-    mNativePeer.resetNative();
-  }
 }
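With the native method registered, the new Module.readLogBuffer() lets an application retrieve the buffered entries after running inference. A minimal usage sketch follows; the model path is a placeholder, and on a device the output would normally go to logcat rather than System.out:

import org.pytorch.executorch.EValue;
import org.pytorch.executorch.Module;

public class ReadLogBufferExample {
  public static void main(String[] args) {
    // Placeholder path; on Android this would typically point into the app's files directory.
    Module module = Module.load("/data/local/tmp/model.pte");

    // Run inference; per the javadoc, calling forward() with no inputs uses sample inputs.
    EValue[] outputs = module.forward();

    // Dump the most recent ExecuTorch log entries, already formatted by the JNI layer.
    for (String entry : module.readLogBuffer()) {
      System.out.println(entry);
    }

    module.destroy();
  }
}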
