Skip to content

Commit

Permalink
[FIX] Fix the behavior when input is empty (mlc-ai#328)
Browse files Browse the repository at this point in the history
Patch for mlc-ai#323

Co-authored-by: Zihao Ye <[email protected]>
  • Loading branch information
Hzfengsy and yzh119 authored Jun 6, 2023
1 parent a985533 commit 85f80cc
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions cpp/llm_chat.cc
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,7 @@ class LLMChat {
/*!
* \brief Generate the next token given a prompt.
*/
void PrefillStep(std::string inp) {
void PrefillStep(std::string inp, bool append_conversation = true) {
if (conversation_.name == "LM") {
this->ResetChat();
}
Expand All @@ -502,7 +502,7 @@ class LLMChat {
appeared_token_ids_.clear();
output_message_.clear();
stop_triggered_ = false;
if (!inp.empty()) {
if (append_conversation) {
conversation_.AppendMessage(conversation_.roles[0], inp);
conversation_.AppendReplyHeader(conversation_.roles[1]);
}
Expand Down Expand Up @@ -763,7 +763,7 @@ class LLMChat {
// Clear the KV cache by invoking the packed reset function on the cache
// object. NOTE(review): presumably this discards all cached attention
// state so the next prefill starts from an empty context — confirm
// against the definition of reset_kv_cache_func_.
void ResetKVCache() { reset_kv_cache_func_(kv_cache_); }

void ProcessSystemPrompts() { this->PrefillStep(/*inp=*/""); }
void ProcessSystemPrompts() { this->PrefillStep(/*inp=*/"", /*append_conversation=*/false); }

// Utils
static double GetRandomNumber() {
Expand Down

0 comments on commit 85f80cc

Please sign in to comment.