Omnibus committed on
Commit
779984c
1 Parent(s): 0501c46

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -12
app.py CHANGED
@@ -16,18 +16,23 @@ def format_prompt(message, history):
16
  prompt += f"[INST] {message} [/INST]"
17
  return prompt
18
 
19
- def compress_history(purpose, task, history):
20
- resp = run_gpt(
21
- COMPRESS_HISTORY,
22
- stop_tokens=["observation:", "task:", "action:", "thought:"],
23
- max_tokens=512,
24
- seed=random.randint(1,1000000000),
25
- purpose=purpose,
26
- task=task,
27
- history=history,
 
28
  )
29
- history = resp
30
- return history
 
 
 
 
31
  MAX_HISTORY=100
32
 
33
  def generate(
@@ -55,7 +60,7 @@ def generate(
55
  cnt+=len(l.split("\n"))
56
  print(f'cnt:: {cnt}')
57
  if cnt > MAX_HISTORY:
58
- history = compress_history(history, prompt, history)
59
  formatted_prompt = format_prompt(f"{GAME_MASTER.format(history=history)}, {prompt}", history)
60
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
61
  output = ""
 
16
  prompt += f"[INST] {message} [/INST]"
17
  return prompt
18
 
19
def compress_history(history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    """Summarize (compress) a long chat history by asking the model to rewrite it.

    Streams a text-generation call against the module-level ``client`` using the
    ``COMPRESS_HISTORY`` prompt template and returns the accumulated output text.

    Args:
        history: The chat-history text to compress (substituted into the
            ``COMPRESS_HISTORY`` template).
        temperature: Sampling temperature forwarded to ``text_generation``.
        max_new_tokens: Upper bound on generated tokens for the summary.
        top_p: Nucleus-sampling cutoff forwarded to ``text_generation``.
        repetition_penalty: Repetition penalty forwarded to ``text_generation``.

    Returns:
        The generated (compressed) history as a single string.
    """
    # BUG FIX: the committed line was missing the closing brace in the f-string
    # (`f"{COMPRESS_HISTORY.format(history=history)"`), which is a SyntaxError
    # and prevents the whole module from importing. The f-string wrapper is
    # redundant anyway, so call .format() directly.
    formatted_prompt = COMPRESS_HISTORY.format(history=history)
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        # Fresh random seed per call so repeated compressions aren't identical.
        seed=random.randint(1, 99999999999),
    )
    # NOTE(review): assumes `client` is an InferenceClient-like object whose
    # text_generation(..., stream=True, details=True) yields objects with
    # `.token.text` — matches its usage elsewhere in this diff; confirm in full file.
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    output = ""
    for response in stream:
        output += response.token.text
    return output
35
+
36
  MAX_HISTORY=100
37
 
38
  def generate(
 
60
  cnt+=len(l.split("\n"))
61
  print(f'cnt:: {cnt}')
62
  if cnt > MAX_HISTORY:
63
+ history = compress_history(history, temperature, max_new_tokens, top_p, repetition_penalty)
64
  formatted_prompt = format_prompt(f"{GAME_MASTER.format(history=history)}, {prompt}", history)
65
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
66
  output = ""