fix the issue with strange strikethrough output

This commit is contained in:
Deyao Zhu 2023-04-19 10:53:33 +03:00
parent c37ef667ba
commit f5c28362d4

View File

@ -160,7 +160,9 @@ class Chat:
temperature=temperature,
)
output_token = outputs[0]
if output_token[0] == 0:
if output_token[0] == 0: # the model might output an unknown token <unk> at the beginning. remove it
output_token = output_token[1:]
if output_token[0] == 1: # some users find that there is a start token <s> at the beginning. remove it
output_token = output_token[1:]
output_text = self.model.llama_tokenizer.decode(output_token, add_special_tokens=False)
output_text = output_text.split('###')[0] # remove the stop sign '###'