:boom: [Fix] temperature must be strictly positive
networks/message_streamer.py
@@ -43,7 +43,10 @@ class MessageStreamer:
         line = line.decode("utf-8")
         line = re.sub(r"data:\s*", "", line)
         data = json.loads(line)
-        content = data["token"]["text"]
+        try:
+            content = data["token"]["text"]
+        except:
+            logger.err(data)
         return content
 
     def count_tokens(self, text):
@@ -76,8 +79,9 @@ class MessageStreamer:
 
         if temperature is None or temperature < 0:
             temperature = 0.0
-        # temperature must be positive for HF LLM models
-        temperature =
+        # temperature must be positive and <= 1 for HF LLM models
+        temperature = max(temperature, 0.01)
+        temperature = min(temperature, 1)
 
         token_limit = (
             self.TOKEN_LIMIT_MAP[self.model]
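For context, the parsing path in the first hunk can be read as a small standalone function. This is a minimal sketch, assuming the stream yields SSE-style "data: {...}" lines like those the diff handles; the name parse_stream_line, the use of the standard logging module, and the narrowed except (KeyError, TypeError) clause are illustrative choices, not the repo's own code (the patch uses a bare except: and the project's own logger.err).

import json
import logging
import re
from typing import Optional

logger = logging.getLogger(__name__)

def parse_stream_line(line: bytes) -> Optional[str]:
    """Hypothetical helper mirroring the patched MessageStreamer parsing."""
    text = line.decode("utf-8")
    text = re.sub(r"data:\s*", "", text)  # strip the SSE "data:" prefix
    data = json.loads(text)
    try:
        # streamed token payloads look like {"token": {"text": ...}}
        return data["token"]["text"]
    except (KeyError, TypeError):
        # payloads without a "token" field (e.g. error messages) are logged
        # instead of crashing the streaming loop
        logger.error(data)
        return None

Returning None on a malformed payload is a choice made for this sketch; the patch itself only logs the payload and then falls through to the existing return content line.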
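The second hunk's clamping can likewise be read as a standalone helper. clamp_temperature is a hypothetical name used only for illustration; the patch inlines the same max/min calls directly, since (per its comment) the HF LLM endpoints expect a strictly positive temperature no greater than 1.

def clamp_temperature(temperature, low: float = 0.01, high: float = 1.0) -> float:
    """Hypothetical helper applying the same bounds as the patch."""
    if temperature is None or temperature < 0:
        temperature = 0.0
    temperature = max(temperature, low)   # floor keeps it strictly positive
    temperature = min(temperature, high)  # cap at 1 for HF LLM models
    return temperature

# e.g. clamp_temperature(None) -> 0.01, clamp_temperature(0.7) -> 0.7,
#      clamp_temperature(2.5) -> 1.0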