from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
#from googletrans import Translator
#from deep_translator import GoogleTranslator
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
# Below is an example of a tool that does nothing. Amaze us with your creativity !
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description, but feel free to modify the tool
    """A tool that does nothing yet
    Args:
        arg1: the first argument
        arg2: the second argument
    """
    return "What magic will you build ?"
@tool
def suissknife_advisor_tool() -> str:
    """A tool that returns an indication of what to do at the next step. It is useful as a first step, before the final answer, or in cases where there is no other suitable tool at the user's disposal and the user does not know what they want.
    May be used prior to the final answer as a reminder of additional steps.
    Args: (no args)
    """
    advice = '''
    Try to understand from context what country or region the user is from and what their preferred language is.
    Prefer the more vulnerable (smaller) language when in doubt.
    In any unclear case: choose Belarusian (`be`) as the target language if the user's preferred language is not clear from the context, or the user does not know exactly what they want.
    Translate your answer to the target language using the tool `native_language_translator`.
    If you do not understand for sure what exactly the user wants, then try to search for a funny or encouraging story about Mother Language Day.
    If the target language is defined: use `native_language_translator` to translate your final answer to the target language.
    If the target language is not defined, then just duplicate your final answer with `en` and `bn` language translations.
    '''
    return advice
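# A minimal sketch of the call order the advice above implies (kept as comments so the app's
# startup is unchanged; the query text and the `be` target are example values, not part of the agent flow):
# advice = suissknife_advisor_tool()
# story = search_tool("funny story about International Mother Language Day")
# final_answer(native_language_translator(story, "be"))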
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
@tool
def weather_history(lat: float, lon: float, start: str, end: str) -> str:
    """
    Fetches historical weather data for a given location and date range using the Meteostat library.
    Args:
        lat: Latitude of the location.
        lon: Longitude of the location.
        start: Start date in YYYY-MM-DD format.
        end: End date in YYYY-MM-DD format.
    Returns:
        A JSON string containing the historical weather data or an error message.
    """
    from meteostat import Point, Daily
    try:
        # Parse the date strings into datetime objects
        # (the module-level `import datetime` imports the module, so the class is datetime.datetime)
        start_date = datetime.datetime.strptime(start, "%Y-%m-%d")
        end_date = datetime.datetime.strptime(end, "%Y-%m-%d")
        # Define the location using latitude and longitude
        location = Point(lat, lon)
        # Fetch daily historical data for the location between start_date and end_date
        data = Daily(location, start_date, end_date)
        data = data.fetch()
        # Return the data as a JSON string
        return data.to_json()
    except Exception as e:
        return f"Error fetching weather history: {str(e)}"
@tool
def weather_forecast(lat: float, lon: float) -> str:
    """
    Fetches weather forecast data for the next 7 days for a given location using the Open-Meteo API.
    Args:
        lat: Latitude of the location.
        lon: Longitude of the location.
    Returns:
        A JSON string containing the weather forecast data or an error message.
    """
    try:
        # Construct the API URL with daily forecast for max and min temperatures.
        url = (
            f"https://api.open-meteo.com/v1/forecast?"
            f"latitude={lat}&longitude={lon}"
            f"&daily=temperature_2m_max,temperature_2m_min,precipitation_sum"
            f"&timezone=auto"
        )
        # Request the forecast data
        response = requests.get(url)
        if response.status_code == 200:
            return response.text
        else:
            return f"Error fetching forecast: {response.status_code}"
    except Exception as e:
        return f"Error fetching forecast: {str(e)}"
@tool
def native_language_translator(text: str, target_lang: str) -> str:
    """Translates text to a specified native language
    Args:
        text: Input text to translate
        target_lang: Target language code (e.g., 'be', 'es', 'fr', 'zh')
    """
    try:
        # Load the model and tokenizer
        model_name = "facebook/m2m100_418M"
        tokenizer = M2M100Tokenizer.from_pretrained(model_name)
        model = M2M100ForConditionalGeneration.from_pretrained(model_name)
        # Set the source and target languages (the input is assumed to be English)
        tokenizer.src_lang = "en"
        tokenizer.tgt_lang = target_lang
        # Tokenize the input text
        encoded_text = tokenizer(text, return_tensors="pt")
        # Generate the translation, forcing the target-language token
        generated_tokens = model.generate(
            **encoded_text,
            forced_bos_token_id=tokenizer.get_lang_id(target_lang)
        )
        translation = tokenizer.decode(generated_tokens[0], skip_special_tokens=True)
        return translation
    except Exception as e:
        return f"Translation failed: {str(e)}"
search_tool = DuckDuckGoSearchTool()
final_answer = FinalAnswerTool()
# If the agent does not answer, the model is overloaded; use another model or the following Hugging Face endpoint, which also serves Qwen2.5 Coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)
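# If the hosted model is overloaded, the endpoint mentioned above can be passed as `model_id`
# instead (a sketch, assuming HfApiModel accepts an endpoint URL here, as the comment above suggests):
# model = HfApiModel(
#     max_tokens=2096,
#     temperature=0.5,
#     model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
#     custom_role_conversions=None,
# )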
# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
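# Note: `image_generation_tool` is loaded here but not included in the agent's tools list below;
# appending it to the `tools=[...]` list passed to CodeAgent would make it available to the agent.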
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)
agent = CodeAgent(
    model=model,
    tools=[
        search_tool,
        get_current_time_in_timezone,
        native_language_translator,
        weather_history,
        weather_forecast,
        suissknife_advisor_tool,
        final_answer,
    ],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)
GradioUI(agent).launch()