Update app.py
app.py
CHANGED
@@ -1,6 +1,6 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
-import wikipediaapi
+import wikipedia
 from PIL import Image
 import requests
 import torch
@@ -9,8 +9,6 @@ from torchvision.models import resnet50
 
 # Initialize inference client for chat
 chat_client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-# Initialize Wikipedia API
-wiki_wiki = wikipediaapi.Wikipedia('en')
 
 # Load pre-trained image classification model
 model = resnet50(pretrained=True)
@@ -23,14 +21,15 @@ transform = transforms.Compose([
 ])
 
 def search_wikipedia(query):
-    page = wiki_wiki.page(query)
-    if page.exists():
-        return page.summary
-    else:
+    try:
+        summary = wikipedia.summary(query)
+        return summary
+    except wikipedia.exceptions.DisambiguationError as e:
+        return f"Disambiguation error: {e}"
+    except wikipedia.exceptions.PageError:
         return "No information found on that topic."
 
 def respond(message, history, system_message, max_tokens, temperature, top_p):
-    # Search Wikipedia for information
     search_response = search_wikipedia(message)
 
     # Prepare the chat messages