Skip to content

Commit

Permalink
llama : catch llama_load_session_file_internal exceptions (ggerganov#…
Browse files Browse the repository at this point in the history
…2022)

* convert checks in llama_load_session_file to throw and handle them

* make llama_load_session_file_internal static

* address feedback to avoid using exceptions
  • Loading branch information
randxie authored Jul 1, 2023
1 parent 79f634a commit cb44dbc
Showing 1 changed file with 9 additions and 2 deletions.
11 changes: 9 additions & 2 deletions llama.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3219,7 +3219,7 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) {
return nread;
}

bool llama_load_session_file(struct llama_context * ctx, const char * path_session, llama_token * tokens_out, size_t n_token_capacity, size_t * n_token_count_out) {
static bool llama_load_session_file_internal(struct llama_context * ctx, const char * path_session, llama_token * tokens_out, size_t n_token_capacity, size_t * n_token_count_out) {
llama_file file(path_session, "rb");

// sanity checks
Expand Down Expand Up @@ -3269,8 +3269,15 @@ bool llama_load_session_file(struct llama_context * ctx, const char * path_sessi

llama_set_state_data(ctx, state_data.data());
}
}

return true;
// Public wrapper around the internal session loader.
// Delegates to llama_load_session_file_internal and converts any
// std::exception thrown during loading into a boolean failure,
// reporting the error message on stderr.
bool llama_load_session_file(struct llama_context * ctx, const char * path_session, llama_token * tokens_out, size_t n_token_capacity, size_t * n_token_count_out) {
    bool success = false;
    try {
        success = llama_load_session_file_internal(ctx, path_session, tokens_out, n_token_capacity, n_token_count_out);
    } catch (const std::exception & err) {
        // loading failed; surface the reason to the caller's console
        fprintf(stderr, "error loading session file: %s\n", err.what());
    }
    return success;
}

bool llama_save_session_file(struct llama_context * ctx, const char * path_session, const llama_token * tokens, size_t n_token_count) {
Expand Down

0 comments on commit cb44dbc

Please sign in to comment.