Update app.py
app.py CHANGED
@@ -1,24 +1,24 @@
 import torch
 from fastai.text.all import *
-from blurr.
-from blurr.
-from transformers import
+from blurr.data.all import *
+from blurr.modeling.all import *
+from transformers import BartForConditionalGeneration
 
 # Load the pre-trained model and tokenizer (adjust for Bart if needed)
 pretrained_model_name = "facebook/bart-large-cnn" # Or "facebook/bart-base"
-hf_tokenizer =
+hf_tokenizer = BartTokenizer.from_pretrained(pretrained_model_name)
 
 def summarize(article):
-
-
+    # Define your data transformation pipeline here, if applicable
+    # ...
 
-
-
+    # Load the exported model
+    learn = load_learner('article_highlights.pkl')
 
-
-
+    # Generate the summary
+    summary = learn.blurr_generate(article)[0]
 
-
+    return summary
 
 # Create the Gradio interface
 iface = gr.Interface(
@@ -31,4 +31,4 @@ iface = gr.Interface(
 )
 
 # Launch the Gradio interface
-iface.launch()
+iface.launch()
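Note: as committed, app.py references two names it never imports: gr (the Gradio module used by gr.Interface) and BartTokenizer (only BartForConditionalGeneration is imported from transformers). A minimal import/setup block that would make the top of the script self-contained might look like the sketch below; it only restates the diff plus those two missing imports and is not part of the commit.

# Sketch: the imports the committed script appears to rely on but does not declare.
# "import gradio as gr" and BartTokenizer are the additions; everything else
# matches the diff above.
import gradio as gr
import torch
from fastai.text.all import *
from blurr.data.all import *
from blurr.modeling.all import *
from transformers import BartTokenizer, BartForConditionalGeneration

pretrained_model_name = "facebook/bart-large-cnn"  # or "facebook/bart-base"
hf_tokenizer = BartTokenizer.from_pretrained(pretrained_model_name)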
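The summarize() function depends on load_learner('article_highlights.pkl'), i.e. a fastai Learner exported elsewhere; the training and export step is not part of this Space. Assuming a blurr summarization Learner was fine-tuned in a separate script, the export that produces that file is typically a single call:

# Sketch only: producing the 'article_highlights.pkl' that app.py loads.
# 'learn' is assumed to be a fastai/blurr Learner fine-tuned for summarization
# in a separate training script (not shown in this commit).
learn.export('article_highlights.pkl')

Loading it back with load_learner only works if the blurr imports at the top of app.py are present, since the pickled Learner references blurr classes and the blurr_generate method they attach to the Learner.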
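The gr.Interface arguments (lines 25-30 of app.py) are collapsed in this diff, so the actual inputs, outputs, and labels are not visible here. Purely as an assumption about what those lines might contain, a typical text-to-text wiring for this summarizer could look like:

# Hypothetical sketch: the real arguments on lines 25-30 are not shown in the diff.
iface = gr.Interface(
    fn=summarize,                                 # the function defined in app.py
    inputs=gr.Textbox(lines=15, label="Article"),
    outputs=gr.Textbox(label="Summary"),
    title="Article Summarizer",
)
iface.launch()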