ML610 committed on
Commit
a5a87a4
1 Parent(s): 67621e9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -33,8 +33,7 @@ def generate(
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
- generator = llm(format_prompt(user_prompt), **asdict(generation_config))
37
- return generator.generated_text
38
 
39
  config = AutoConfig.from_pretrained(
40
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048
@@ -69,8 +68,12 @@ example_2 = "Write a python script which prints 'you are logged in' only if the
69
 
70
  examples = [example_1, example_2]
71
 
 
 
 
 
72
  UI = gr.Interface(
73
- fn=generate,
74
  inputs=gr.Textbox(label="user_prompt", placeholder="Ask your queries here...."),
75
  outputs=gr.Textbox(label="Assistant"),
76
  title=title,
 
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
+ return llm(format_prompt(user_prompt), **asdict(generation_config))
 
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048
 
68
 
69
  examples = [example_1, example_2]
70
 
71
+ def generate_code(user_input):
72
+ response = generate(llm, generation_config, user_input)
73
+ return response
74
+
75
  UI = gr.Interface(
76
+ fn=generate_code,
77
  inputs=gr.Textbox(label="user_prompt", placeholder="Ask your queries here...."),
78
  outputs=gr.Textbox(label="Assistant"),
79
  title=title,