fix issue https://github.com/THUDM/ChatGLM3/issues/227
Fixes GitHub issue: https://github.com/THUDM/ChatGLM3/issues/227
When the model's prediction does not contain a \n, response parsing no longer raises an error.
- modeling_chatglm.py +4 -1
@@ -1000,7 +1000,10 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
         content = ""
         history = deepcopy(history)
         for response in output.split("<|assistant|>"):
-            metadata, content = response.split("\n", maxsplit=1)
+            if "\n" in response:
+                metadata, content = response.split("\n", maxsplit=1)
+            else:
+                metadata, content = "", response
             if not metadata.strip():
                 content = content.strip()
                 history.append({"role": "assistant", "metadata": metadata, "content": content})
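
For context, here is a minimal standalone sketch of the new parsing step. The `parse_response` helper below is hypothetical, pulled out for illustration only; in the repo this logic lives inside `ChatGLMForConditionalGeneration.process_response` in modeling_chatglm.py.

```python
# Hypothetical helper illustrating the fixed parsing logic.
def parse_response(response):
    if "\n" in response:
        # Tool-call style responses carry metadata before the first newline.
        metadata, content = response.split("\n", maxsplit=1)
    else:
        # Plain replies with no newline: before this fix, the unconditional
        # split raised "ValueError: not enough values to unpack".
        metadata, content = "", response
    return metadata, content


print(parse_response("get_weather\n{\"city\": \"Beijing\"}"))  # ('get_weather', '{"city": "Beijing"}')
print(parse_response("a reply without a newline"))             # ('', 'a reply without a newline')
```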