mohammed3536 committed
Commit
415cfe5
1 Parent(s): cdaf432

Create app.py

Files changed (1)
  1. app.py +95 -0
app.py ADDED
@@ -0,0 +1,95 @@
+ import PyPDF2
+ import nltk
+ from nltk.tokenize import sent_tokenize
+ import random
+ import requests
+ import streamlit as st
+
+ # Download NLTK data (if not already downloaded)
+ nltk.download('punkt')
+ nltk.download('averaged_perceptron_tagger')
+
+ # ChatGPT API endpoint
+ CHATGPT_API_ENDPOINT = "https://api.openai.com/v1/chat/completions"
+ OPENAI_API_KEY = "YOUR_OPENAI_API_KEY"  # Replace with your ChatGPT API key (do not hard-code a real key)
+
+ def extract_text_from_pdf(pdf_file):
+     pdf_reader = PyPDF2.PdfReader(pdf_file)
+     text = ""
+     for page_num in range(len(pdf_reader.pages)):
+         text += pdf_reader.pages[page_num].extract_text()
+     return text
+
+ def generate_mcqs_on_topic(text, topic, num_mcqs=5):
+     # Tokenize the text into sentences
+     sentences = nltk.sent_tokenize(text)
+
+     # Randomly select sentences to create Questions
+     selected_sentences = random.sample(sentences, min(num_mcqs, len(sentences)))
+
+     mcqs = []
+     for sentence in selected_sentences:
+         # Use ChatGPT for interactive question generation
+         chatgpt_question = generate_question_with_chatgpt(sentence)
+         mcqs.append(chatgpt_question)
+
+         # Use LLM for nuanced language modeling
+         llm_question = generate_question_with_llm(sentence, topic)
+         mcqs.append(llm_question)
+
+     return mcqs
+
+ def generate_question_with_chatgpt(context):
+     headers = {
+         "Content-Type": "application/json",
+         "Authorization": f"Bearer {OPENAI_API_KEY}",
+     }
+
+     data = {
+         "model": "gpt-3.5-turbo",
+         "messages": [
+             {"role": "system", "content": "You are a helpful assistant."},
+             {"role": "user", "content": f"What is the question for the following? {context}"},
+         ],
+     }
+
+     response = requests.post(CHATGPT_API_ENDPOINT, json=data, headers=headers)
+     result = response.json()
+
+     # Extract the generated question from the response
+     generated_question = result["choices"][0]["message"]["content"]
+     return generated_question
+
+ # NOTE: generate_question_with_llm and LLM_API_ENDPOINT are not defined in this
+ # commit; the signature and placeholder payload below are reconstructed so the
+ # call in generate_mcqs_on_topic resolves.
+ LLM_API_ENDPOINT = ""  # Set to your LLM service URL
+
+ def generate_question_with_llm(context, topic):
+     headers = {"Content-Type": "application/json"}
+     data = {"context": context, "topic": topic}
+
+     response = requests.post(LLM_API_ENDPOINT, json=data, headers=headers)
+     result = response.json()
+
+     # Extract the generated question from the response
+     generated_question = result["generated_question"]
+     return generated_question
+
+ def main():
+     # Title of the Application
+     st.title("🤖CB Quiz Generator🧠")
+     st.subheader("☕CoffeeBeans☕")
+
+     # User input
+     pdf_file = st.file_uploader("Upload PDF Document:", type=["pdf"])
+     num_mcqs = st.number_input("Enter Number of MCQs to Generate:", min_value=1, step=1, value=5)
+     topic = st.text_input("Enter the Topic in which the quiz has to be generated")
+
+     # Button to trigger QUIZ generation
+     if st.button("Generate MCQs"):
+         if pdf_file:
+             text = extract_text_from_pdf(pdf_file)
+             mcqs = generate_mcqs_on_topic(text, topic, num_mcqs)
+
+             # Display the generated Questions
+             st.success(f"Generated {len(mcqs)} Questions:")
+             for i, question in enumerate(mcqs, start=1):
+                 st.write(f"\nQuestion {i}: {question}")
+         else:
+             st.error("Please upload a PDF document.")
+
+ if __name__ == "__main__":
+     main()