Commit 8a4b4d4

[Android] Output error message and stack trace in chat (#866)
* [Android] Output error message and stack trace in chat

  This PR enables the output of the error message and stack trace in chat when the chat backend fails.

* apply code review suggestions

Co-authored-by: spectrometerHBH <[email protected]>
1 parent 42d683c commit 8a4b4d4

5 files changed (+67, -15 lines)
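For readers skimming the diffs below, the gist of the app-side change is a small wrapper that runs a backend call and, if it throws, posts the stack trace and error message into the chat as a bot message before marking the session as failed, so callers can bail out instead of crashing. The following is a condensed, self-contained Kotlin sketch of that pattern; Role, appendMessage, and switchToFailed here are simplified stand-ins for the view-model helpers, and the real code (in the AppViewModel.kt hunks below) posts the message through viewModelScope.

// Sketch only: Role, appendMessage, and switchToFailed are simplified stand-ins
// for the AppViewModel helpers; the real implementation runs inside the view
// model and posts the chat message on viewModelScope.
enum class Role { Bot }

fun appendMessage(role: Role, text: String) = println("[$role] $text")

fun switchToFailed() { /* the real code sets modelChatState to ModelChatState.Falied */ }

// Run a backend call; on failure, surface the stack trace and error message
// in chat instead of crashing, and report false so the caller can return early.
fun callBackend(callback: () -> Unit): Boolean {
    try {
        callback()
    } catch (e: Exception) {
        appendMessage(
            Role.Bot,
            "MLCChat failed\n\nStack trace:\n${e.stackTraceToString()}\n\n" +
                "Error message:\n${e.localizedMessage}"
        )
        switchToFailed()
        return false
    }
    return true
}

fun main() {
    // Callers wrap each backend call and return early on failure,
    // mirroring how the reload and generate paths use it in the diff.
    if (!callBackend { error("backend not loaded") }) return
    println("not reached in this sketch")
}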

CMakeLists.txt (+1, -1)

@@ -82,7 +82,7 @@ set(
   ${TVM_HOME}/3rdparty/picojson
 )
 
-set(MLC_LLM_COMPILE_DEFS DMLC_USE_LOGGING_LIBRARY=<tvm/runtime/logging.h>)
+set(MLC_LLM_COMPILE_DEFS ${MLC_LLM_COMPILE_DEFS} DMLC_USE_LOGGING_LIBRARY=<tvm/runtime/logging.h>)
 
 target_include_directories(mlc_llm_objs PRIVATE ${MLC_LLM_INCLUDES})
 target_compile_definitions(mlc_llm_objs PRIVATE ${MLC_LLM_COMPILE_DEFS})

android/CMakeLists.txt (+1)

@@ -7,6 +7,7 @@ set(ANDROID_BIN_DIR ${CMAKE_CURRENT_BINARY_DIR})
 
 set(MLC_LLM_DIR ${ANDROID_DIR}/..)
 set(MLC_LLM_BINARY_DIR mlc_llm)
+set(MLC_LLM_COMPILE_DEFS TVM_LOG_CUSTOMIZE=1)
 add_subdirectory(${MLC_LLM_DIR} ${MLC_LLM_BINARY_DIR} EXCLUDE_FROM_ALL)
 
 if (NOT DEFINED TVM_HOME)

android/MLCChat/app/src/main/java/ai/mlc/mlcchat/AppViewModel.kt (+39, -10)

@@ -498,7 +498,7 @@ class AppViewModel(application: Application) : AndroidViewModel(application) {
 
         private fun mainResetChat() {
             executorService.submit {
-                backend.resetChat()
+                callBackend { backend.resetChat() }
                 viewModelScope.launch {
                     clearHistory()
                     switchToReady()
@@ -528,6 +528,28 @@ class AppViewModel(application: Application) : AndroidViewModel(application) {
             modelChatState.value = ModelChatState.Ready
         }
 
+        private fun switchToFailed() {
+            modelChatState.value = ModelChatState.Falied
+        }
+
+        private fun callBackend(callback: () -> Unit): Boolean {
+            try {
+                callback()
+            } catch (e: Exception) {
+                viewModelScope.launch {
+                    val stackTrace = e.stackTraceToString()
+                    val errorMessage = e.localizedMessage
+                    appendMessage(
+                        MessageRole.Bot,
+                        "MLCChat failed\n\nStack trace:\n$stackTrace\n\nError message:\n$errorMessage"
+                    )
+                    switchToFailed()
+                }
+                return false
+            }
+            return true
+        }
+
         fun requestResetChat() {
             require(interruptable())
             interruptChat(
@@ -571,7 +593,7 @@ class AppViewModel(application: Application) : AndroidViewModel(application) {
 
         private fun mainTerminateChat(callback: () -> Unit) {
             executorService.submit {
-                backend.unload()
+                callBackend { backend.unload() }
                 viewModelScope.launch {
                     clearHistory()
                     switchToReady()
@@ -609,8 +631,10 @@ class AppViewModel(application: Application) : AndroidViewModel(application) {
                 viewModelScope.launch {
                     Toast.makeText(application, "Initialize...", Toast.LENGTH_SHORT).show()
                 }
-                backend.unload()
-                backend.reload(modelLib, modelPath)
+                if (!callBackend {
+                        backend.unload()
+                        backend.reload(modelLib, modelPath)
+                    }) return@submit
                 viewModelScope.launch {
                     Toast.makeText(application, "Ready to chat", Toast.LENGTH_SHORT).show()
                     switchToReady()
@@ -624,11 +648,13 @@ class AppViewModel(application: Application) : AndroidViewModel(application) {
             executorService.submit {
                 appendMessage(MessageRole.User, prompt)
                 appendMessage(MessageRole.Bot, "")
-                backend.prefill(prompt)
+                if (!callBackend { backend.prefill(prompt) }) return@submit
                 while (!backend.stopped()) {
-                    backend.decode()
-                    val newText = backend.getMessage()
-                    viewModelScope.launch { updateMessage(MessageRole.Bot, newText) }
+                    if (!callBackend {
+                            backend.decode()
+                            val newText = backend.message
+                            viewModelScope.launch { updateMessage(MessageRole.Bot, newText) }
+                        }) return@submit
                     if (modelChatState.value != ModelChatState.Generating) return@submit
                 }
                 val runtimeStats = backend.runtimeStatsText()
@@ -653,7 +679,9 @@ class AppViewModel(application: Application) : AndroidViewModel(application) {
         }
 
         fun interruptable(): Boolean {
-            return modelChatState.value == ModelChatState.Ready || modelChatState.value == ModelChatState.Generating
+            return modelChatState.value == ModelChatState.Ready
+                    || modelChatState.value == ModelChatState.Generating
+                    || modelChatState.value == ModelChatState.Falied
         }
     }
 }
@@ -674,7 +702,8 @@ enum class ModelChatState {
     Resetting,
     Reloading,
     Terminating,
-    Ready
+    Ready,
+    Falied
 }
 
 enum class MessageRole {

android/prepare_libs.sh (+3, -3)

@@ -23,8 +23,8 @@ cmake .. \
   -DUSE_HEXAGON_SDK=OFF \
   -DMLC_LLM_INSTALL_STATIC_LIB=ON \
   -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON \
-  -DUSE_OPENCL=ON
+  -DUSE_OPENCL=ON \
+  -DUSE_CUSTOM_LOGGING=ON \
 
-make tvm4j_runtime_packed -j8
+make tvm4j_runtime_packed -j${nproc}
 cmake --build . --target install --config release -j
-
android/src/cpp/tvm_runtime.h (+23, -1)

@@ -4,4 +4,26 @@
 #include <dlfcn.h>
 #include <dmlc/logging.h>
 #include <dmlc/thread_local.h>
-#include <tvm/runtime/c_runtime_api.h>
+#include <tvm/runtime/c_runtime_api.h>
+
+#include <android/log.h>
+
+static_assert(TVM_LOG_CUSTOMIZE == 1, "TVM_LOG_CUSTOMIZE must be 1");
+
+namespace tvm {
+namespace runtime {
+namespace detail {
+// Override logging mechanism
+[[noreturn]] void LogFatalImpl(const std::string& file, int lineno, const std::string& message) {
+  std::string m = file + ":" + std::to_string(lineno) + ": " + message;
+  __android_log_write(ANDROID_LOG_FATAL, "TVM_RUNTIME", m.c_str());
+  throw InternalError(file, lineno, message);
+}
+void LogMessageImpl(const std::string& file, int lineno, int level, const std::string& message) {
+  std::string m = file + ":" + std::to_string(lineno) + ": " + message;
+  __android_log_write(ANDROID_LOG_DEBUG + level, "TVM_RUNTIME", m.c_str());
+}
+
+}  // namespace detail
+}  // namespace runtime
+}  // namespace tvm
