project-baize committed on
Commit 2348315
1 Parent(s): baa7023

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -33,7 +33,7 @@ def predict(text,
     except:
         yield [[text,"No Model Found"]],[],"No Model Found"
         return
-    torch.cuda.empty_cache()
+
     inputs = generate_prompt_with_history(text,history,tokenizer,max_length=max_context_length_tokens)
     if inputs is False:
         yield chatbot+[[text,"Sorry, the input is too long."]],history,"Generate Fail"
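
For context, the hunk removes the torch.cuda.empty_cache() call that ran on every request before the prompt was built. Below is a minimal, self-contained sketch of the predict() flow this hunk touches; the model, tokenizer, and generate_prompt_with_history stand-ins are hypothetical and only the control flow mirrors the diff, everything else is illustrative.

def generate_prompt_with_history(text, history, tokenizer, max_length=2048):
    # Hypothetical stand-in for the Space's helper: join the conversation and
    # the new message, and return False when the prompt would be too long.
    prompt = "".join(user + reply for user, reply in history) + text
    return (prompt, prompt) if len(prompt) <= max_length else False


def predict(text, chatbot, history, tokenizer=None,
            max_context_length_tokens=2048, model=None):
    # No-model branch, as in the context lines of the hunk.
    if model is None:
        yield [[text, "No Model Found"]], [], "No Model Found"
        return
    # Before this commit, the GPU cache was flushed here on every call:
    # torch.cuda.empty_cache()
    inputs = generate_prompt_with_history(
        text, history, tokenizer, max_length=max_context_length_tokens)
    if inputs is False:
        yield chatbot + [[text, "Sorry, the input is too long."]], history, "Generate Fail"
        return
    prompt, _ = inputs
    # ... generation and streaming of the reply would follow here ...
    yield chatbot + [[text, prompt]], history + [[text, prompt]], "Generate Success"


if __name__ == "__main__":
    # With no model loaded, the "No Model Found" branch is exercised.
    for _chatbot, _history, status in predict("Hello", [], []):
        print(status)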