seawolf2357 committed on
Commit
4509126
•
1 Parent(s): 0926d14

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -6
app.py CHANGED
@@ -14,6 +14,9 @@ intents.messages = True
14
  # 추론 API 클라이언트 설정
15
  hf_client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=os.getenv("HF_TOKEN"))
16
 
 
 
 
17
  class MyClient(discord.Client):
18
  def __init__(self, *args, **kwargs):
19
  super().__init__(*args, **kwargs)
@@ -26,7 +29,7 @@ class MyClient(discord.Client):
26
  logging.info('์ž์‹ ์˜ ๋ฉ”์‹œ์ง€๋Š” ๋ฌด์‹œํ•ฉ๋‹ˆ๋‹ค.')
27
  return
28
 
29
- logging.debug(f'Receiving message: {message.content}') # 로깅 추가
30
  response = await generate_response(message.content)
31
  await message.channel.send(response)
32
 
@@ -40,22 +43,25 @@ async def generate_response(user_input):
40
  반드시 한글로 답변하십시오.
41
  """
42
 
43
- # ๊ตฌ์ฒด์ ์ธ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ œ๊ณตํ•˜์—ฌ ๋ชจ๋ธ์˜ ์‘๋‹ต์„ ์œ ๋„ํ•ฉ๋‹ˆ๋‹ค.
44
- messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
45
- messages.append({"role": "user", "content": user_input})
 
 
46
 
47
  # 동기 함수를 비동기로 처리하기 위한 래퍼 사용, stream=true로 변경
48
  loop = asyncio.get_event_loop()
49
  response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(
50
- messages, max_tokens=200, stream=True, temperature=0.9, top_p=0.9)) # 조정된 파라미터
51
 
52
  # 스트리밍 응답을 처리하는 로직 추가
53
  full_response = ""
54
  for part in response:
55
  full_response += part.choices[0].delta.content.strip()
56
 
57
- logging.debug(f'Model response: {full_response}') # 응답 로깅
58
 
 
59
  return full_response
60
 
61
  # 디스코드 봇 인스턴스 생성 및 실행
 
14
  # 추론 API 클라이언트 설정
15
  hf_client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=os.getenv("HF_TOKEN"))
16
 
17
+ # 대화 히스토리를 저장할 변수
18
+ conversation_history = []
19
+
20
  class MyClient(discord.Client):
21
  def __init__(self, *args, **kwargs):
22
  super().__init__(*args, **kwargs)
 
29
  logging.info('์ž์‹ ์˜ ๋ฉ”์‹œ์ง€๋Š” ๋ฌด์‹œํ•ฉ๋‹ˆ๋‹ค.')
30
  return
31
 
32
+ logging.debug(f'Receiving message: {message.content}')
33
  response = await generate_response(message.content)
34
  await message.channel.send(response)
35
 
 
43
  반드시 한글로 답변하십시오.
44
  """
45
 
46
+ # 대화 히스토리 관리
47
+ global conversation_history
48
+ conversation_history.append({"role": "user", "content": user_input})
49
+
50
+ messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}] + conversation_history
51
 
52
  # 동기 함수를 비동기로 처리하기 위한 래퍼 사용, stream=true로 변경
53
  loop = asyncio.get_event_loop()
54
  response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(
55
+ messages, max_tokens=200, stream=True, temperature=0.9, top_p=0.9))
56
 
57
  # 스트리밍 응답을 처리하는 로직 추가
58
  full_response = ""
59
  for part in response:
60
  full_response += part.choices[0].delta.content.strip()
61
 
62
+ conversation_history.append({"role": "assistant", "content": full_response})
63
 
64
+ logging.debug(f'Model response: {full_response}')
65
  return full_response
66
 
67
  # 디스코드 봇 인스턴스 생성 및 실행