add the tutorial 2
This commit is contained in:
parent
d0896e6057
commit
309f6c6849
71
T2_tools_define/tools.py
Normal file
71
T2_tools_define/tools.py
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
from typing import Optional
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from smolagents.agents import ToolCallingAgent
|
||||||
|
from smolagents import CodeAgent, HfApiModel, tool
|
||||||
|
from huggingface_hub import login
|
||||||
|
from smolagents import LiteLLMModel
|
||||||
|
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Load environment variables from the local .env file into os.environ.
load_dotenv()

# Credentials pulled from the environment (define API_KEY and WEATHER_API in .env).
api_key = os.getenv("API_KEY")
weather_api = os.getenv("WEATHER_API")

# Authenticate against the Hugging Face Hub with the token from .env.
login(api_key)

# Select LLM engine to use!
model = HfApiModel()
# Alternative: a local Ollama model served through LiteLLM.
# model = LiteLLMModel(
#     model_id="ollama_chat/llama3.1",
#     api_base="http://localhost:11434",  # replace with remote open-ai compatible server if necessary
#     # api_key="your-api-key",  # replace with API key if necessary
#     # num_ctx=8192,  # ollama default is 2048 which will often fail horribly. 8192 works for easy tasks, more is better. Check https://huggingface.co/spaces/NyxKrage/LLM-Model-VRAM-Calculator to calculate how much VRAM this will need for the selected model.
# )
|
@tool
def get_weather(location: str, celsius: Optional[bool] = False) -> str:
    """
    Get the current weather at the given location using the WeatherStack API.

    Args:
        location: The location (city name).
        celsius: Whether to return the temperature in Celsius (default is False, which returns Fahrenheit).

    Returns:
        A string describing the current weather at the location, or an
        error message if the API call fails.
    """
    api_key = weather_api  # Replace with your API key from https://weatherstack.com/
    units = "m" if celsius else "f"  # 'm' for Celsius, 'f' for Fahrenheit

    url = "http://api.weatherstack.com/current"
    # Let requests build the query string so the location is URL-encoded;
    # a raw f-string URL breaks for cities with spaces or '&' in the name.
    params = {"access_key": api_key, "query": location, "units": units}

    try:
        # timeout prevents the agent from hanging forever on a stalled connection
        response = requests.get(url, params=params, timeout=10)
        response.raise_for_status()  # Raise an exception for HTTP errors

        data = response.json()

        if data.get("error"):  # Check if there's an error in the response
            return f"Error: {data['error'].get('info', 'Unable to fetch weather data.')}"

        weather = data["current"]["weather_descriptions"][0]
        temp = data["current"]["temperature"]
        temp_unit = "°C" if celsius else "°F"

        return f"The current weather in {location} is {weather} with a temperature of {temp} {temp_unit}."

    except requests.exceptions.RequestException as e:
        return f"Error fetching weather data: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
# Wire the weather tool into a tool-calling agent backed by the model above.
agent = ToolCallingAgent(tools=[get_weather], model=model)
# Alternative agent flavor that writes code to invoke tools:
# agent = CodeAgent(tools=[get_weather], model=model)

# Demo run: triggers a live WeatherStack lookup via get_weather.
agent.run("What is the weather in New York?")
|
Loading…
Reference in New Issue
Block a user