Update app.py
app.py
CHANGED
@@ -10,12 +10,19 @@ import wikipediaapi
 from b import b
 
 nltk.download('punkt')
+nltk.download('stopwords')
+nltk.download('brown')
 from nltk.tokenize import sent_tokenize
 
 # Load spaCy model
 nlp = spacy.load("en_core_web_sm")
 # wiki_wiki = wikipediaapi.Wikipedia('en')
 
+# Initialize Wikipedia API with a user agent
+user_agent = 'QGen/1.0 (channingfisher7@gmail.com)'
+wiki_wiki = wikipediaapi.Wikipedia(user_agent=user_agent, language='en')
+
+
 # Load T5 model and tokenizer
 model_name = "DevBM/t5-large-squad"
 model = T5ForConditionalGeneration.from_pretrained(model_name)
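Note on the hunk above: recent releases of the wikipedia-api package (0.6.0 and later) require an explicit user agent, in line with the Wikimedia User-Agent policy, which is presumably why the old commented-out wikipediaapi.Wikipedia('en') call is replaced. A minimal standalone sketch of the new initialization, using the user-agent string from the diff (the page title below is illustrative only):

import wikipediaapi

# wikipedia-api 0.6.0+ requires a descriptive user agent and will refuse
# to construct a client without one; this string comes from the diff.
user_agent = 'QGen/1.0 (channingfisher7@gmail.com)'
wiki_wiki = wikipediaapi.Wikipedia(user_agent=user_agent, language='en')

# Smoke test: fetch an arbitrary page and print its canonical URL.
page = wiki_wiki.page('Python (programming language)')
if page.exists():
    print(page.fullurl)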
@@ -61,11 +68,11 @@ def map_keywords_to_sentences(text, keywords, context_window_size):
     return keyword_sentence_mapping
 
 # Function to perform entity linking using Wikipedia API
-
-
-
-
-
+def entity_linking(keyword):
+    page = wiki_wiki.page(keyword)
+    if page.exists():
+        return page.fullurl
+    return None
 
 # Function to generate questions using beam search
 def generate_question(context, answer, num_beams=5):
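The new entity_linking helper resolves a keyword to the canonical URL of its Wikipedia page, or None when no page exists. A short usage sketch (the keyword is illustrative):

# Illustrative call; relies on the wiki_wiki client initialized earlier.
url = entity_linking('Machine learning')
print(url or 'no Wikipedia page found')
# expected output: https://en.wikipedia.org/wiki/Machine_learning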
@@ -114,13 +121,13 @@ if st.button("Generate Questions"):
     for i, (keyword, context) in enumerate(keyword_sentence_mapping.items()):
         if i >= num_questions:
             break
-
+        linked_entity = entity_linking(keyword)
         question = generate_question(context, keyword, num_beams=num_beams)
         st.write(f"**Context:** {context}")
         st.write(f"**Answer:** {keyword}")
         st.write(f"**Question:** {question}")
-
-
+        if linked_entity:
+            st.write(f"**Entity Link:** {linked_entity}")
         st.write("---")
         data.append((context, keyword, question))
 
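The last hunk wires entity linking into the Streamlit output loop: each generated question is now accompanied by a link to the keyword's Wikipedia page when one exists. The diff cuts off at the generate_question signature, whose body is untouched by this commit and not shown. For readers new to T5 question generation, a sketch of what such a function typically looks like; the prompt format and generation parameters are assumptions, not taken from this repository:

from transformers import T5ForConditionalGeneration, T5Tokenizer

model_name = "DevBM/t5-large-squad"
model = T5ForConditionalGeneration.from_pretrained(model_name)
tokenizer = T5Tokenizer.from_pretrained(model_name)

def generate_question(context, answer, num_beams=5):
    # "answer: ... context: ..." is a common prompt format for
    # SQuAD-finetuned T5 question generators; an assumption here.
    input_text = f"answer: {answer} context: {context}"
    inputs = tokenizer(input_text, return_tensors="pt", truncation=True, max_length=512)
    # Beam search over num_beams candidates; keep the best sequence.
    outputs = model.generate(**inputs, num_beams=num_beams, max_length=64, early_stopping=True)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)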