seawolf2357 committed
Commit e2de219 · Parent(s): 7d0592f
Update app.py

app.py CHANGED
@@ -150,20 +150,17 @@ async def generate_replies(comments, transcript):
     replies = []
     for comment, _ in comments:
         messages = [
-            {"role": "system", "content":
+            {"role": "system", "content": """Your name is OpenFreeAI. At the very end of the reply, state your name and greet politely. Video subtitles: {transcript}"""},
             {"role": "user", "content": comment}
         ]
-
-
-
-
-
-
-
-
-        except Exception as e:
-            logging.error(f"Error occurred during API call: {e}")
-            reply = "Could not generate a reply due to a server error."
+        loop = asyncio.get_event_loop()
+        response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(
+            messages, max_tokens=250, temperature=0.7, top_p=0.85))
+
+        if response.choices and response.choices[0].message:
+            reply = response.choices[0].message['content'].strip()
+        else:
+            reply = "Could not generate a reply."
         replies.append(reply)

     logging.debug(f'Generated replies: {replies}')
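For context, a minimal self-contained sketch of the pattern this commit introduces: the blocking hf_client.chat_completion call is pushed onto a thread-pool executor via loop.run_in_executor so it does not block the asyncio event loop. The model name, the standalone function shape, and the attribute-style access to the response message are assumptions for illustration, not taken from this repository.

# Minimal sketch of the async-wrapper pattern used above.
# Assumptions: hf_client is a huggingface_hub.InferenceClient and the model
# name is only a placeholder; neither is taken from this commit.
import asyncio

from huggingface_hub import InferenceClient

hf_client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")  # assumed model

async def generate_reply(comment: str, transcript: str) -> str:
    messages = [
        {"role": "system", "content": f"Your name is OpenFreeAI. Video subtitles: {transcript}"},
        {"role": "user", "content": comment},
    ]
    loop = asyncio.get_event_loop()
    # run_in_executor(None, ...) runs the synchronous HTTP call on the default
    # ThreadPoolExecutor, keeping the event loop free for other tasks.
    response = await loop.run_in_executor(
        None,
        lambda: hf_client.chat_completion(messages, max_tokens=250, temperature=0.7, top_p=0.85),
    )
    if response.choices and response.choices[0].message:
        # Recent huggingface_hub versions expose the text as an attribute;
        # dict-style access (message['content']) also works for backward compatibility.
        return response.choices[0].message.content.strip()
    return "Could not generate a reply."

if __name__ == "__main__":
    print(asyncio.run(generate_reply("Great video!", "example transcript text")))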