ML610 committed on
Commit
cca5ead
1 Parent(s): 2f89d92

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -33,7 +33,7 @@ def generate(
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
- return llm(format_prompt(user_prompt), **asdict(generation_config))
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048
 
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
+ return next(llm(format_prompt(user_prompt), **asdict(generation_config)))["text"]
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048