coffeeee committed on
Commit
67eeb75
1 Parent(s): 6d19242

final fixes to whitespace

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -23,17 +23,17 @@ def generate_response(outputs, new_prompt):
23
  story_so_far = "\n".join(outputs[:int(1024 / response_length + 1)]) if outputs else ""
24
 
25
  set_seed(random.randint(0, 4000000000))
26
- inputs = tokenizer.encode(story_so_far + new_prompt if story_so_far else new_prompt,
27
  return_tensors='pt', truncation=True,
28
  max_length=1024 - response_length)
29
 
30
  output = model.generate(inputs, do_sample=True, generation_config=generation_config)
31
 
32
- response = clean_paragraph(tokenizer.batch_decode(output)[0][(len(story_so_far) if story_so_far else 0):])
33
  outputs.append(response)
34
  return {
35
  user_outputs: outputs,
36
- story: story_so_far + "\n" + response
37
  }
38
 
39
  def undo(outputs):
 
23
  story_so_far = "\n".join(outputs[:int(1024 / response_length + 1)]) if outputs else ""
24
 
25
  set_seed(random.randint(0, 4000000000))
26
+ inputs = tokenizer.encode(story_so_far + "\n" + new_prompt if story_so_far else new_prompt,
27
  return_tensors='pt', truncation=True,
28
  max_length=1024 - response_length)
29
 
30
  output = model.generate(inputs, do_sample=True, generation_config=generation_config)
31
 
32
+ response = clean_paragraph(tokenizer.batch_decode(output)[0][(len(story_so_far) + 1 if story_so_far else 0):])
33
  outputs.append(response)
34
  return {
35
  user_outputs: outputs,
36
+ story: (story_so_far + "\n" if story_so_far else "") + response
37
  }
38
 
39
  def undo(outputs):