king007 committed on
Commit cce6693 • 1 Parent(s): 3423dc9

Update app.py

Files changed (1)
  1. app.py +7 -1
app.py CHANGED
@@ -19,6 +19,12 @@ def generate2(prompt, max_new_tokens):
     generated_ids = model2.generate(batch["input_ids"], max_new_tokens=150)
     output = tokenizer2.batch_decode(generated_ids, skip_special_tokens=True)
     return output[0]
+def generate2_test(prompt):
+    batch = tokenizer2(prompt, return_tensors="pt")
+    generated_ids = model2.generate(batch["input_ids"], max_new_tokens=150)
+    output = tokenizer2.batch_decode(generated_ids, skip_special_tokens=True)
+    return output[0]
+
 def generate_prompt(type, prompt, max_new_tokens):
     if type==1:
         return generate(prompt)
@@ -29,4 +35,4 @@ input_component = gr.Textbox(label = "Input a persona, e.g. photographer", value
 output_component = gr.Textbox(label = "Prompt")
 examples = [["photographer"], ["developer"]]
 description = ""
-gr.Interface(generate, inputs = input_component, outputs=output_component, examples=examples, title = "👨🏻‍🎤 ChatGPT Prompt Generator v12 👨🏻‍🎤", description=description).launch()
+gr.Interface(generate2_test, inputs = input_component, outputs=output_component, examples=examples, title = "👨🏻‍🎤 ChatGPT Prompt Generator v12 👨🏻‍🎤", description=description).launch()
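For context, below is a minimal sketch of the relevant parts of app.py after this commit. Only generate2_test and the gr.Interface call come from the change itself; the checkpoint name "google/flan-t5-small" and the model2/tokenizer2 loading lines are stand-ins for setup code defined earlier in app.py and not shown in this diff.

# Sketch of app.py after commit cce6693.
# ASSUMPTION: "google/flan-t5-small" is a stand-in checkpoint; the real
# model2/tokenizer2 setup lives earlier in app.py and is not part of this diff.
import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

tokenizer2 = AutoTokenizer.from_pretrained("google/flan-t5-small")
model2 = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-small")

def generate2_test(prompt):
    # Tokenize the persona text and let the seq2seq model write a ChatGPT prompt.
    batch = tokenizer2(prompt, return_tensors="pt")
    generated_ids = model2.generate(batch["input_ids"], max_new_tokens=150)
    output = tokenizer2.batch_decode(generated_ids, skip_special_tokens=True)
    return output[0]

# The default value of input_component is truncated in the diff header above.
input_component = gr.Textbox(label = "Input a persona, e.g. photographer")
output_component = gr.Textbox(label = "Prompt")
examples = [["photographer"], ["developer"]]
description = ""

# The commit swaps the interface function from generate to generate2_test.
gr.Interface(generate2_test, inputs = input_component, outputs=output_component,
             examples=examples, title = "👨🏻‍🎤 ChatGPT Prompt Generator v12 👨🏻‍🎤",
             description=description).launch()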