Peter committed
Commit • f4f4797
1 Parent(s): b38cd28
formatting
app.py CHANGED
@@ -77,17 +77,17 @@ def proc_submission(
     clean_text = clean(input_text, lower=False)
     processed = truncate_word_count(clean_text, max_input_length)
     if processed["was_truncated"]:
-
+        tr_in = processed["truncated_text"]
         history["was_truncated"] = True
         msg = f"Input text was truncated to {max_input_length} characters."
         logging.warning(msg)
         history["WARNING"] = msg
     else:
-
+        tr_in = input_text
         history["was_truncated"] = False

     _summaries = summarize_via_tokenbatches(
-
+        tr_in,
         model,
         tokenizer,
         batch_length=token_batch_length,
@@ -96,9 +96,9 @@ def proc_submission(
     sum_text = [s["summary"][0] for s in _summaries]
     sum_scores = [f"\n - {round(s['summary_score'],4)}" for s in _summaries]

-    history["Input"] = input_text
     history["Summary Text"] = "\n\t".join(sum_text)
     history["Summary Scores"] = "\n".join(sum_scores)
+    history["Input"] = tr_in
     html = ""
     for name, item in history.items():
         html += (
@@ -139,7 +139,7 @@ if __name__ == "__main__":
         inputs=[
             gr.inputs.Textbox(lines=10, label="input text"),
             gr.inputs.Slider(
-                minimum=1, maximum=6, label="num_beams", default=
+                minimum=1, maximum=6, label="num_beams", default=4, step=1
             ),
             gr.inputs.Slider(
                 minimum=512, maximum=2048, label="token_batch_length", default=1024, step=512,
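For readers skimming this diff, the substantive change is that proc_submission now stores the possibly truncated text in tr_in, passes tr_in (rather than the raw input_text) to summarize_via_tokenbatches, and records the same tr_in under history["Input"], so the displayed input always matches what was actually summarized. Below is a minimal, self-contained sketch of that pattern; truncate_word_count and summarize_stub are simplified placeholders rather than the repo's real implementations, and proc_submission_sketch only mirrors the control flow shown in the hunks above.

import logging

def truncate_word_count(text: str, max_words: int) -> dict:
    # simplified stand-in for the repo's helper: cap the input at max_words words
    words = text.split()
    if len(words) > max_words:
        return {"was_truncated": True, "truncated_text": " ".join(words[:max_words])}
    return {"was_truncated": False, "truncated_text": text}

def summarize_stub(text: str) -> str:
    # hypothetical placeholder for summarize_via_tokenbatches(tr_in, model, tokenizer, ...)
    return text[:120]

def proc_submission_sketch(input_text: str, max_input_length: int = 512) -> dict:
    history = {}
    processed = truncate_word_count(input_text, max_input_length)
    if processed["was_truncated"]:
        tr_in = processed["truncated_text"]
        history["was_truncated"] = True
        msg = f"Input text was truncated to {max_input_length} characters."
        logging.warning(msg)
        history["WARNING"] = msg
    else:
        tr_in = input_text
        history["was_truncated"] = False

    # the summarizer and the echoed input now both use the same tr_in
    history["Summary Text"] = summarize_stub(tr_in)
    history["Input"] = tr_in
    return history

if __name__ == "__main__":
    print(proc_submission_sketch("word " * 1000, max_input_length=50)["was_truncated"])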
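The last hunk only gives the num_beams slider an explicit default=4 and step=1. For context, this is roughly how such gr.inputs widgets are wired into a gr.Interface under the older Gradio inputs API this app uses; summarize_fn below is a hypothetical stand-in for proc_submission, and exact widget behavior depends on the installed Gradio version.

import gradio as gr

def summarize_fn(input_text: str, num_beams: int, token_batch_length: int) -> str:
    # hypothetical stand-in for proc_submission; just echoes the chosen settings
    return (
        f"num_beams={int(num_beams)}, "
        f"token_batch_length={int(token_batch_length)}, chars={len(input_text)}"
    )

iface = gr.Interface(
    fn=summarize_fn,
    inputs=[
        gr.inputs.Textbox(lines=10, label="input text"),
        # explicit default and step, as set in this commit
        gr.inputs.Slider(minimum=1, maximum=6, default=4, step=1, label="num_beams"),
        gr.inputs.Slider(minimum=512, maximum=2048, default=1024, step=512, label="token_batch_length"),
    ],
    outputs="text",
)

if __name__ == "__main__":
    iface.launch()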