Add a new reverse prompt for the new localdocs context feature.

pull/721/head
Adam Treat 1 year ago committed by AT
parent 54fc980cb5
commit 9bfff8bfcb

@@ -983,7 +983,7 @@ void GPTJ::prompt(const std::string &prompt,
     std::string cachedResponse;
     std::vector<gpt_vocab::id> cachedTokens;
     std::unordered_set<std::string> reversePrompts
-        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant" };
+        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant", "### Context" };
     // predict next tokens
     int32_t totalPredictions = 0;

@@ -179,7 +179,7 @@ void LLamaModel::prompt(const std::string &prompt,
     std::string cachedResponse;
     std::vector<llama_token> cachedTokens;
     std::unordered_set<std::string> reversePrompts
-        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant" };
+        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant", "### Context" };
     // predict next tokens
     int32_t totalPredictions = 0;

@@ -908,7 +908,7 @@ void MPT::prompt(const std::string &prompt,
     std::string cachedResponse;
     std::vector<int> cachedTokens;
     std::unordered_set<std::string> reversePrompts
-        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant" };
+        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant", "### Context" };
     // predict next tokens
     int32_t totalPredictions = 0;

Loading…
Cancel
Save