Updated fixed context
app.py CHANGED
@@ -9,7 +9,8 @@ from transformers import DistilBertTokenizer, TFDistilBertForQuestionAnswering
 # Load model & tokenizer
 # model_name = "deepset/roberta-base-squad2"
 # model_name = "AmazonScience/qanlu"
-model_name = 'distilbert-base-cased-distilled-squad'
+# model_name = 'distilbert-base-cased-distilled-squad'
+model_name = "bert-large-uncased-whole-word-masking-finetuned-squad"
 # tokenizer = AutoTokenizer.from_pretrained("bert-large-uncased-whole-word-masking-finetuned-squad")
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
@@ -45,8 +46,7 @@ nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)
 fixed_context = """Ishaan is 6 year old kid. he is very good in football. He is very good sports person.
 he is smart kid. He can run very fast. as fast as 10 meters in 1 minute.
 He goes to Vidyani ketan school. He goes to school from 8 am to 3:30 pm.
-Ishaan has many friends.
-Vineet is his brother. """
+Ishaan has many friends. Vineet is Ishaan's brother. """
 
 # def get_answer(fixed_context,question):
 # QA_input = {
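For reference, this commit only swaps the checkpoint string and rewrites the last line of the context; the app keeps building the question-answering pipeline from the hunk context above. A minimal sketch of how the updated model and context would be queried (the question string is an illustrative assumption, not part of the diff):

from transformers import pipeline

# Checkpoint selected in this commit; the tokenizer is resolved from the same name,
# matching the nlp = pipeline(...) call shown in the second hunk header.
model_name = "bert-large-uncased-whole-word-masking-finetuned-squad"
nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)

# Context string as it reads after this change.
fixed_context = """Ishaan is 6 year old kid. he is very good in football. He is very good sports person.
he is smart kid. He can run very fast. as fast as 10 meters in 1 minute.
He goes to Vidyani ketan school. He goes to school from 8 am to 3:30 pm.
Ishaan has many friends. Vineet is Ishaan's brother. """

# Hypothetical query, not taken from app.py.
result = nlp(question="Who is Ishaan's brother?", context=fixed_context)
print(result["answer"], result["score"])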