FuturesonyAi / app.py2
Futuresony's picture
Rename app.pyyy2 to app.py2
3d06565 verified
raw
history blame
3.34 kB
import importlib
from datetime import datetime
from urllib.parse import quote_plus
from zoneinfo import ZoneInfo, available_timezones

import gradio as gr
import pytz
import requests
from bs4 import BeautifulSoup
from huggingface_hub import InferenceClient
# Local helper module (weather.py in this Space) providing get_weather(city).
weather = importlib.import_module("weather")
# Hugging Face Inference client for the chat model used by respond().
client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
def google_search(query):
    """Scrape Google's answer-box snippet for *query*.

    Args:
        query: Free-text search query.

    Returns:
        The text of the first answer-box div if present, otherwise a
        human-readable fallback message. Never raises.
    """
    # quote_plus escapes spaces/&/# so the query survives URL embedding.
    url = f"https://www.google.com/search?q={quote_plus(query)}"
    headers = {"User-Agent": "Mozilla/5.0"}
    try:
        # Timeout keeps the chat handler from hanging on a slow response.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")
        # NOTE(review): this CSS class is Google-internal and can change
        # without notice; when it does, this falls through to the apology.
        result = soup.find("div", class_="BNeawe iBp4i AP7Wnd")
        if result:
            return result.text
        return "Sorry, I couldn't find an answer."
    except Exception:
        # Best-effort by design: any network/parse failure becomes a message.
        return "I'm unable to fetch data from Google right now."
def get_time_in_city(city):
    """Return the current wall-clock time for *city*.

    The city name is matched against the IANA timezone database (e.g.
    "New York" -> "America/New_York"). The original implementation only
    special-cased New York and silently reported UTC for every other
    city; this looks the zone up for any city, falling back to UTC when
    no zone matches.

    Args:
        city: City name; case-insensitive, spaces allowed.

    Returns:
        A sentence with the current HH:MM:SS time in that city, or an
        apology message on any failure. Never raises.
    """
    try:
        # IANA zone names use underscores: "new york" -> "/new_york".
        suffix = "/" + city.strip().lower().replace(" ", "_")
        zone_name = next(
            (tz for tz in sorted(available_timezones()) if tz.lower().endswith(suffix)),
            "UTC",  # unknown city: keep the original UTC fallback
        )
        now = datetime.now(ZoneInfo(zone_name))
        return f"The current time in {city} is {now.strftime('%H:%M:%S')}."
    except Exception:
        return "I couldn't fetch the time for that city."
def get_current_date():
    """Return today's date as a human-readable sentence."""
    formatted = datetime.today().strftime("%d %B %Y")
    return f"Today's date is {formatted}."
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Chatbot handler: answer real-time queries locally, else ask the model.

    Time/date/weather questions (English or Swahili keywords) are routed
    to the helper functions above; everything else is streamed from the
    Hugging Face model, with a Google-scrape fallback when the model
    returns nothing useful.

    Args:
        message: Latest user message.
        history: Iterable of (user, assistant) message pairs.
        system_message: System prompt for the model.
        max_tokens: Maximum tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability.

    Returns:
        The assistant's reply as a string.
    """
    message_lower = message.lower()

    # Time questions ("saa ngapi" is Swahili for "what time").
    if "what time" in message_lower or "saa ngapi" in message_lower:
        # NOTE(review): naive heuristic — assumes the last word is the city.
        city = message.split()[-1]
        return get_time_in_city(city)

    # Date questions ("leo ni tarehe ngapi": "what's today's date").
    if "what date" in message_lower or "leo ni tarehe ngapi" in message_lower:
        return get_current_date()

    # Weather questions ("hali ya hewa": "the weather").
    if "weather" in message_lower or "hali ya hewa" in message_lower:
        city = message.split()[-1]
        return weather.get_weather(city)

    # Build the chat transcript expected by the model.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # Stream the completion. The loop variable must NOT be `message`:
    # the original code shadowed it, so the Google fallback below was
    # handed the last stream chunk instead of the user's question.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # delta.content can be None on some stream events
            response += token

    # If the model doesn't know, fall back to scraping Google.
    if "I don't know" in response or response.strip() == "":
        response = google_search(message)
    return response
# Gradio chat UI. The additional inputs are forwarded positionally to
# respond() as system_message, max_tokens, temperature and top_p.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
    ],
)

# Launch only when run as a script (Spaces also imports this module).
if __name__ == "__main__":
    demo.launch()