from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
#from googletrans import Translator
#from deep_translator import GoogleTranslator
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
# Below is an example of a tool that does nothing. Amaze us with your creativity!
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description but feel free to modify the tool
"""A tool that does nothing yet
Args:
arg1: the first argument
arg2: the second argument
"""
return "What magic will you build ?"
@tool
def suissknife_advisor_tool() -> str:
    """A tool that suggests what to do at the next step. It is useful as a first step, before the final answer, or when no other suitable tool is available and the user does not know what they want.
    May be used prior to the final answer as a reminder about additional steps.
    Args: (no args)
    """
    advice = '''
    Try to understand from context which country or region the user is from and what their preferred language is.
    Prefer the more vulnerable (smaller) language when in doubt.
    In any unclear case: choose Belarusian (`be`) as the target language if the user's preferred language is not clear from the context, or the user does not know exactly what they want.
    Translate your answer to the target language using the tool `native_language_translator`.
    If you do not understand for sure what exactly the user wants, then try to search for a funny or encouraging story about Mother Language Day.
    If the target language is defined: use `native_language_translator` to translate your final answer to the target language.
    If the target language is not defined, then just duplicate your final answer with `en` and `bn` language translations.
    '''
    return advice
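# Hedged sketch of the intended flow (comments only; the names below are the tools defined in this file):
#   1. Call suissknife_advisor_tool() first to get the routing hints above.
#   2. Pick a target language, falling back to `be` when it is unclear, per the advice.
#   3. Translate the draft answer with native_language_translator(text, target_lang).
#   4. Hand the translated text to final_answer.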
@tool
def get_current_time_in_timezone(timezone: str) -> str:
"""A tool that fetches the current local time in a specified timezone.
Args:
timezone: A string representing a valid timezone (e.g., 'America/New_York').
"""
try:
# Create timezone object
tz = pytz.timezone(timezone)
# Get current time in that timezone
local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
return f"The current local time in {timezone} is: {local_time}"
except Exception as e:
return f"Error fetching time for timezone '{timezone}': {str(e)}"
@tool
def weather_history(lat: float, lon: float, start: str, end: str) -> str:
"""
Fetches historical weather data for a given location and date range using the Meteostat library.
Args:
lat: Latitude of the location.
lon: Longitude of the location.
start: Start date in YYYY-MM-DD format.
end: End date in YYYY-MM-DD format.
Returns:
A JSON string containing the historical weather data or an error message.
"""
from meteostat import Point, Daily
try:
# Parse the date strings into datetime objects
        # Parse the date strings into datetime objects (datetime is imported as a module above)
        start_date = datetime.datetime.strptime(start, "%Y-%m-%d")
        end_date = datetime.datetime.strptime(end, "%Y-%m-%d")
# Define the location using latitude and longitude
location = Point(lat, lon)
# Fetch daily historical data for the location between start_date and end_date
data = Daily(location, start_date, end_date)
data = data.fetch()
# Return the data as a JSON string
return data.to_json()
except Exception as e:
return f"Error fetching weather history: {str(e)}"
@tool
def weather_forecast(lat: float, lon: float) -> str:
"""
Fetches weather forecast data for the next 7 days for a given location using the Open-Meteo API.
Args:
lat: Latitude of the location.
lon: Longitude of the location.
Returns:
A JSON string containing the weather forecast data or an error message.
"""
try:
# Construct the API URL with daily forecast for max and min temperatures.
url = (
f"https://api.open-meteo.com/v1/forecast?"
f"latitude={lat}&longitude={lon}"
f"&daily=temperature_2m_max,temperature_2m_min,precipitation_sum"
f"&timezone=auto"
)
# Request the forecast data
response = requests.get(url)
if response.status_code == 200:
return response.text
else:
return f"Error fetching forecast: {response.status_code}"
except Exception as e:
return f"Error fetching forecast: {str(e)}"
@tool
def native_language_translator(text: str, target_lang: str) -> str:
"""Translates text to a specified native language
Args:
text: Input text to translate
target_lang: Target language code (e.g., 'be', 'es', 'fr', 'zh')
"""
try:
# Load the model and tokenizer
model_name = "facebook/m2m100_418M"
tokenizer = M2M100Tokenizer.from_pretrained(model_name)
model = M2M100ForConditionalGeneration.from_pretrained(model_name)
# Set the target language
tokenizer.src_lang = "en" # Assume input is in English
tokenizer.tgt_lang = target_lang
# Tokenize the input text
encoded_text = tokenizer(text, return_tensors="pt")
# Generate translation
generated_tokens = model.generate(
**encoded_text,
forced_bos_token_id=tokenizer.get_lang_id(target_lang)
)
        translation = tokenizer.decode(generated_tokens[0], skip_special_tokens=True)
        return translation
except Exception as e:
return f"Translation failed: {str(e)}"
search_tool = DuckDuckGoSearchTool()
final_answer = FinalAnswerTool()
# If the agent does not answer, the model may be overloaded; use another model or the following Hugging Face endpoint, which also serves Qwen2.5 Coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
max_tokens=2096,
temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
custom_role_conversions=None,
)
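# Hedged alternative (per the comment above): if the hosted model is overloaded, the same
# HfApiModel can point at the shared endpoint instead, e.g.
#   model = HfApiModel(
#       max_tokens=2096,
#       temperature=0.5,
#       model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
#       custom_role_conversions=None,
#   )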
# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
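# Note: image_generation_tool is loaded here but not listed in the agent's tools below; to expose it,
# add it to the tools=[...] list passed to CodeAgent (a hedged suggestion, not part of the original setup).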
with open("prompts.yaml", 'r') as stream:
prompt_templates = yaml.safe_load(stream)
agent = CodeAgent(
model=model,
tools=[
search_tool,
get_current_time_in_timezone,
native_language_translator,
weather_history,
weather_forecast,
suissknife_advisor_tool,
final_answer,
], ## add your tools here (don't remove final answer)
max_steps=6,
verbosity_level=1,
grammar=None,
planning_interval=None,
name=None,
description=None,
prompt_templates=prompt_templates
)
GradioUI(agent).launch()