Futuresony committed (verified)
Commit 18c3fd1 · 1 Parent(s): f14952f

Update app.py

Files changed (1): app.py (+28 -27)
app.py CHANGED
@@ -1,39 +1,41 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
-from serpapi import GoogleSearch  # Import SerpAPI
-
-SERPAPI_KEY = "your_serpapi_key_here"  # Replace with your API key
+from selenium import webdriver
+from selenium.webdriver.common.by import By
+from selenium.webdriver.chrome.service import Service
+from webdriver_manager.chrome import ChromeDriverManager
+import time
 
 client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
 
 def is_uncertain(question, response):
     """Check if the model's response is unreliable."""
-    if len(response.split()) < 4:  # Too short = likely incorrect
-        return True
-    if response.lower() in question.lower():  # Repeats question = unsure
-        return True
-    uncertain_phrases = [
-        "Kulingana na utafiti", "Inaaminika kuwa", "Ninadhani",
-        "It is believed that", "Some people say", "Inasemekana kuwa"
-    ]
-    if any(phrase.lower() in response.lower() for phrase in uncertain_phrases):
+    if len(response.split()) < 4 or response.lower() in question.lower():
         return True
-    return False
+    uncertain_phrases = ["Kulingana na utafiti", "Inaaminika kuwa", "Ninadhani", "It is believed that", "Some people say"]
+    return any(phrase.lower() in response.lower() for phrase in uncertain_phrases)
 
 def google_search(query):
-    """Fetch search results using SerpAPI."""
-    params = {
-        "q": query,
-        "hl": "en",
-        "gl": "us",
-        "api_key": SERPAPI_KEY
-    }
-    search = GoogleSearch(params)
-    results = search.get_dict()
-
-    if "organic_results" in results:
-        return results["organic_results"][0]["snippet"]  # First search result
-    return "Sorry, I couldn't find an answer on Google."
+    """Fetch search results using Selenium."""
+    options = webdriver.ChromeOptions()
+    options.add_argument("--headless")  # Run in background
+    driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)
+
+    driver.get(f"https://www.google.com/search?q={query}")
+    time.sleep(2)  # Wait for page to load
+
+    try:
+        # Extract answer from featured snippet if available
+        snippet = driver.find_element(By.CLASS_NAME, "hgKElc").text
+    except:
+        # Extract first search result
+        try:
+            snippet = driver.find_element(By.CSS_SELECTOR, "div.BNeawe.s3v9rd.AP7Wnd").text
+        except:
+            snippet = "Sorry, I couldn't find an answer on Google."
+
+    driver.quit()
+    return snippet
 
 def respond(message, history, system_message, max_tokens, temperature, top_p):
     messages = [{"role": "system", "content": system_message}]
@@ -48,7 +50,6 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
         response += token
         yield response  # Stream the response
 
-    # If the model's response is unreliable, fetch from Google
     if is_uncertain(message, response):
        google_response = google_search(message)
        yield f"🤖 AI: {response}\n\n🌍 Google: {google_response}"
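
For context on the revised is_uncertain heuristic: the Swahili hedging phrases roughly translate as "Kulingana na utafiti" ("According to research"), "Inaaminika kuwa" ("It is believed that") and "Ninadhani" ("I think"); the dropped "Inasemekana kuwa" means "It is said that". A quick illustration of how the heuristic behaves, using hypothetical inputs that are not part of this commit:

# Hypothetical inputs, shown only to illustrate the committed heuristic.
print(is_uncertain("Who won the 1970 World Cup?", "Ninadhani Brazil"))
# -> True: fewer than 4 words and a hedging phrase
print(is_uncertain("Who won the 1970 World Cup?", "Brazil won the 1970 FIFA World Cup in Mexico."))
# -> False: long, direct answer with no hedging phrase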
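
The new google_search path requires selenium and webdriver-manager to be installed and a Chrome/Chromium binary to be available in the Space; the Google class names it scrapes (hgKElc, BNeawe.s3v9rd.AP7Wnd) are not stable selectors and can break without notice. As a sketch only (not part of the commit), a more defensive variant of the same idea could replace the fixed time.sleep(2) with an explicit wait and avoid bare except clauses; the --headless=new and --no-sandbox flags and the assumption that Google's results container has id "search" are my additions:

# Sketch of a more defensive variant of google_search(); assumptions noted above.
from urllib.parse import quote_plus
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from webdriver_manager.chrome import ChromeDriverManager

def google_search_sketch(query, timeout=5):
    options = webdriver.ChromeOptions()
    options.add_argument("--headless=new")  # assumed: newer Chrome headless mode
    options.add_argument("--no-sandbox")    # assumed: often needed in containers
    driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)
    try:
        driver.get(f"https://www.google.com/search?q={quote_plus(query)}")
        # Wait for the results container instead of sleeping a fixed 2 seconds.
        WebDriverWait(driver, timeout).until(
            EC.presence_of_element_located((By.ID, "search"))  # assumed container id
        )
        # Same selectors as the commit: featured snippet first, then first result.
        for by, selector in ((By.CLASS_NAME, "hgKElc"),
                             (By.CSS_SELECTOR, "div.BNeawe.s3v9rd.AP7Wnd")):
            elements = driver.find_elements(by, selector)
            if elements:
                return elements[0].text
        return "Sorry, I couldn't find an answer on Google."
    finally:
        driver.quit()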
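
The diff does not show how respond is attached to the UI. In the standard Hugging Face chat template, a streaming generator with this signature is wired into gr.ChatInterface roughly as below; the system message and slider defaults are assumptions for illustration, not values taken from app.py:

# Usual wiring for a streaming respond() generator; defaults are illustrative only.
import gradio as gr

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()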