Hazzzardous committed "Update app.py"
Commit: 60c9ed5
Parent(s): 84ab92c
app.py CHANGED
@@ -85,7 +85,13 @@ def infer(
     # Clear model state for generative mode
     model.resetState()
     if (mode == "Q/A"):
-        prompt = f"
+        prompt = f"\nQ: {prompt}\n\nA:"
+    if (mode == "ELDR"):
+        prompt = f"\n{prompt}\n\nExpert Long Detailed Response:"
+    if (mode == "EFA"):
+        prompt = f'\nAsk Expert\n\nQuestion:\n{prompt}\n\nExpert Full Answer:\n'
+    if (mode == "BFR"):
+        prompt = f"Task given:\n\n{prompt}\n\nBest Full Response:"
 
     print(f"PROMPT ({datetime.now()}):\n-------\n{prompt}")
     print(f"OUTPUT ({datetime.now()}):\n-------\n")
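The new branches all do the same thing: wrap the user's prompt in a fixed template chosen by the selected mode, while "generative" (or any unmatched mode) leaves the prompt untouched. As an illustrative sketch only (MODE_TEMPLATES and build_prompt are names invented here, not part of app.py), the same dispatch could be expressed as a lookup table of format strings:

# Illustrative sketch: dict-based version of the mode -> prompt-template
# dispatch added in this commit. Names are assumptions, not from app.py.
MODE_TEMPLATES = {
    "Q/A":  "\nQ: {prompt}\n\nA:",
    "ELDR": "\n{prompt}\n\nExpert Long Detailed Response:",
    "EFA":  "\nAsk Expert\n\nQuestion:\n{prompt}\n\nExpert Full Answer:\n",
    "BFR":  "Task given:\n\n{prompt}\n\nBest Full Response:",
}

def build_prompt(prompt: str, mode: str) -> str:
    # Unknown modes (including "generative") pass the prompt through
    # unchanged, matching the if-chain in the hunk above.
    template = MODE_TEMPLATES.get(mode)
    return template.format(prompt=prompt) if template else prompt

With a table like this, adding a new mode is a one-line change, and the Radio choices in the second hunk could be derived from list(MODE_TEMPLATES) so the UI and the prompt logic cannot drift apart.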
@@ -244,7 +250,7 @@ iface = gr.Interface(
     allow_flagging="never",
     inputs=[
         gr.Textbox(lines=20, label="Prompt"),  # prompt
-        gr.Radio(["generative", "Q/A"],
+        gr.Radio(["generative", "Q/A","ELDR","EFR","BFR"],
                  value="generative", label="Choose Mode"),
         gr.Slider(1, 256, value=40),  # max_tokens
         gr.Slider(0.0, 1.0, value=0.8),  # temperature
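The hunk header shows these widgets form the inputs list of the gr.Interface wrapping infer, so the Radio's selected string arrives positionally as the mode argument used in the first hunk. A minimal, self-contained sketch of that wiring (infer_stub and the output Textbox are placeholders, not code from app.py; the real app passes more arguments and options such as allow_flagging) might look like:

import gradio as gr

def infer_stub(prompt, mode, max_tokens, temperature):
    # Placeholder for the real infer(): just echoes what the UI passed in.
    return f"mode={mode}, max_tokens={max_tokens}, temperature={temperature}\n\n{prompt}"

iface = gr.Interface(
    fn=infer_stub,
    inputs=[
        gr.Textbox(lines=20, label="Prompt"),                   # prompt
        gr.Radio(["generative", "Q/A", "ELDR", "EFR", "BFR"],
                 value="generative", label="Choose Mode"),      # mode
        gr.Slider(1, 256, value=40),                            # max_tokens
        gr.Slider(0.0, 1.0, value=0.8),                         # temperature
    ],
    outputs=gr.Textbox(label="Output"),
)

if __name__ == "__main__":
    iface.launch()

Note that the new Radio choice "EFR" does not appear to match the "EFA" branch added in the first hunk, so selecting "EFR" would presumably fall through to plain generative behavior with no template applied.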