Update app.py
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 import logging
 import torch
-from transformers import
+from transformers import AutoTokenizer, AutoModelForCausalLM
 import requests
 from bs4 import BeautifulSoup
 import json
@@ -26,7 +26,6 @@ class Config:
     REQUEST_TIMEOUT = 10
     MAX_DEPTH = 1
     SIMILARITY_THRESHOLD = 0.4 # Lowered threshold for testing
-    # Add some example URLs that are publicly accessible
     BASE_URLS = [
         "https://www.sspencer.k12.in.us/", # Replace with actual school website
         # Add more public URLs here
@@ -44,10 +43,22 @@ class ResourceItem:
 
 class SchoolChatbot:
     def __init__(self):
-        self.setup_models()
+        self.setup_models() # Ensure this method is defined
         self.resources = []
         self.visited_urls = set()
-        self.crawl_and_index_resources()
+        self.crawl_and_index_resources()
+
+    def setup_models(self):
+        """Initialize all required models"""
+        try:
+            logger.info("Setting up models...")
+            self.tokenizer = AutoTokenizer.from_pretrained(Config.MODEL_NAME)
+            self.model = AutoModelForCausalLM.from_pretrained(Config.MODEL_NAME)
+            self.embedding_model = SentenceTransformer(Config.EMBEDDING_MODEL)
+            logger.info("Models setup completed successfully")
+        except Exception as e:
+            logger.error(f"Error setting up models: {e}")
+            raise
 
     def crawl_and_index_resources(self):
         """Crawl and index resources from the base URLs."""
@@ -189,6 +200,7 @@ Source: {best_resource.url}
         end_idx = min(len(sentences), best_idx + 2)
 
         return '. '.join(sentences[start_idx:end_idx])
+
 if __name__ == "__main__":
     logger.info("Starting chatbot...")
     chatbot = SchoolChatbot()
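The new setup_models() method references Config.MODEL_NAME, Config.EMBEDDING_MODEL, and SentenceTransformer, none of which appear in the visible hunks. A minimal sketch of the assumed surrounding pieces, with placeholder checkpoint names that are illustrative only and not taken from this commit:

import logging

from sentence_transformers import SentenceTransformer  # assumed import backing self.embedding_model
from transformers import AutoTokenizer, AutoModelForCausalLM

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class Config:
    MODEL_NAME = "distilgpt2"             # hypothetical causal LM checkpoint
    EMBEDDING_MODEL = "all-MiniLM-L6-v2"  # hypothetical sentence-transformers checkpoint

# Mirrors the loading sequence in the new setup_models() body
tokenizer = AutoTokenizer.from_pretrained(Config.MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(Config.MODEL_NAME)
embedding_model = SentenceTransformer(Config.EMBEDDING_MODEL)

Wrapping the loads in try/except with logging and a re-raise, as the commit does, keeps a failed model download visible in the Space's startup logs instead of leaving a partially initialized SchoolChatbot.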