Hansimov committed on
Commit
4ba2ca6
1 Parent(s): c769be6

:gem: [Feature] MessageStreamer: Support stop sequence and auto removal

Browse files
Files changed (1) hide show
  1. networks/message_streamer.py +21 -4
networks/message_streamer.py CHANGED
@@ -10,11 +10,16 @@ class MessageStreamer:
10
  MODEL_MAP = {
11
  "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", # 72.62, fast [Recommended]
12
  "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.2", # 65.71, fast
13
- "openchat-3.5": "openchat/openchat_3.5", # 61.24, fast
14
  # "zephyr-7b-alpha": "HuggingFaceH4/zephyr-7b-alpha", # 59.5, fast
15
  # "zephyr-7b-beta": "HuggingFaceH4/zephyr-7b-beta", # 61.95, slow
16
  "default": "mistralai/Mixtral-8x7B-Instruct-v0.1",
17
  }
 
 
 
 
 
18
 
19
  def __init__(self, model: str):
20
  if model in self.MODEL_MAP.keys():
@@ -68,6 +73,13 @@ class MessageStreamer:
68
  },
69
  "stream": True,
70
  }
 
 
 
 
 
 
 
71
  logger.back(self.request_url)
72
  enver.set_envs(proxies=True)
73
  stream_response = requests.post(
@@ -100,18 +112,23 @@ class MessageStreamer:
100
  ]
101
  logger.back(final_output)
102
 
 
103
  for line in stream_response.iter_lines():
104
  if not line:
105
  continue
106
  content = self.parse_line(line)
107
 
108
- if content.strip() == "</s>":
109
  logger.success("\n[Finished]")
110
  break
111
  else:
112
  logger.back(content, end="")
113
- final_output["choices"][0]["message"]["content"] += content
 
 
 
114
 
 
115
  return final_output
116
 
117
  def chat_return_generator(self, stream_response):
@@ -122,7 +139,7 @@ class MessageStreamer:
122
 
123
  content = self.parse_line(line)
124
 
125
- if content.strip() == "</s>":
126
  content_type = "Finished"
127
  logger.success("\n[Finished]")
128
  is_finished = True
 
10
  MODEL_MAP = {
11
  "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", # 72.62, fast [Recommended]
12
  "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.2", # 65.71, fast
13
+ "openchat-3.5": "openchat/openchat-3.5-1210", # ??, fast
14
  # "zephyr-7b-alpha": "HuggingFaceH4/zephyr-7b-alpha", # 59.5, fast
15
  # "zephyr-7b-beta": "HuggingFaceH4/zephyr-7b-beta", # 61.95, slow
16
  "default": "mistralai/Mixtral-8x7B-Instruct-v0.1",
17
  }
18
+ STOP_SEQUENCES_MAP = {
19
+ "mixtral-8x7b": "</s>",
20
+ "mistral-7b": "</s>",
21
+ "openchat-3.5": "<|end_of_turn|>",
22
+ }
23
 
24
  def __init__(self, model: str):
25
  if model in self.MODEL_MAP.keys():
 
73
  },
74
  "stream": True,
75
  }
76
+
77
+ if self.model in self.STOP_SEQUENCES_MAP.keys():
78
+ self.stop_sequences = self.STOP_SEQUENCES_MAP[self.model]
79
+ # self.request_body["parameters"]["stop_sequences"] = [
80
+ # self.STOP_SEQUENCES[self.model]
81
+ # ]
82
+
83
  logger.back(self.request_url)
84
  enver.set_envs(proxies=True)
85
  stream_response = requests.post(
 
112
  ]
113
  logger.back(final_output)
114
 
115
+ final_content = ""
116
  for line in stream_response.iter_lines():
117
  if not line:
118
  continue
119
  content = self.parse_line(line)
120
 
121
+ if content.strip() == self.stop_sequences:
122
  logger.success("\n[Finished]")
123
  break
124
  else:
125
  logger.back(content, end="")
126
+ final_content += content
127
+
128
+ if self.model in self.STOP_SEQUENCES_MAP.keys():
129
+ final_content = final_content.replace(self.stop_sequences, "")
130
 
131
+ final_output["choices"][0]["message"]["content"] = final_content
132
  return final_output
133
 
134
  def chat_return_generator(self, stream_response):
 
139
 
140
  content = self.parse_line(line)
141
 
142
+ if content.strip() == self.stop_sequences:
143
  content_type = "Finished"
144
  logger.success("\n[Finished]")
145
  is_finished = True