project-baize committed
Commit: db8c43f
Parent(s): 3d2cf1e

Update app.py

Files changed (1): app.py (+4, -1)
app.py CHANGED
@@ -4,6 +4,7 @@ import logging
 import sys
 import gradio as gr
 import torch
+import gc
 from app_modules.utils import *
 from app_modules.presets import *
 from app_modules.overwrites import *
@@ -53,7 +54,7 @@ def predict(text,
             x = x[:x.index("[|Human|]")].strip()
         if "[|AI|]" in x:
             x = x[:x.index("[|AI|]")].strip()
-        x = x.strip(" ")
+        x = x.strip()
         a, b= [[y[0],convert_to_markdown(y[1])] for y in history]+[[text, convert_to_markdown(x)]],history + [[text,x]]
         yield a, b, "Generating..."
         if shared_state.interrupted:
@@ -63,6 +64,8 @@ def predict(text,
             return
         except:
             pass
+    del input_ids
+    gc.collect()
     torch.cuda.empty_cache()
     #print(text)
     #print(x)
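
The added lines follow a common GPU memory cleanup sequence for PyTorch inference code: drop the Python references to the input tensors, run the garbage collector, then ask PyTorch to release its cached CUDA blocks. A minimal sketch of that pattern, assuming a Hugging Face causal LM and tokenizer (the model/tokenizer setup below is illustrative, not taken from app.py):

import gc
import torch

def generate_once(model, tokenizer, prompt, device="cuda"):
    # Tokenize the prompt and move the input tensor onto the GPU.
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device)
    with torch.no_grad():
        output_ids = model.generate(input_ids, max_new_tokens=128)
    text = tokenizer.decode(output_ids[0], skip_special_tokens=True)

    # Same cleanup sequence as the commit: delete the tensor references,
    # let Python's garbage collector reclaim them, then release PyTorch's
    # cached CUDA memory so it shows up as free to other consumers.
    del input_ids, output_ids
    gc.collect()
    torch.cuda.empty_cache()
    return text

Note that torch.cuda.empty_cache() only returns memory the caching allocator is no longer using, which is why dropping the references and calling gc.collect() come first.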