From a8cee2c39826eabf2e26e048f9bb62661658b7d4 Mon Sep 17 00:00:00 2001 From: pong Date: Thu, 20 Feb 2025 21:54:41 +0800 Subject: [PATCH] create the tutorial 0: installation --- T0_Installation/Readme.md | 0 T0_Installation/installation.ipynb | 686 +++++++++++++++++++++++++++++ 2 files changed, 686 insertions(+) create mode 100644 T0_Installation/Readme.md create mode 100644 T0_Installation/installation.ipynb diff --git a/T0_Installation/Readme.md b/T0_Installation/Readme.md new file mode 100644 index 0000000..e69de29 diff --git a/T0_Installation/installation.ipynb b/T0_Installation/installation.ipynb new file mode 100644 index 0000000..fc6265f --- /dev/null +++ b/T0_Installation/installation.ipynb @@ -0,0 +1,686 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Installation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "1. Install Ollama: https://ollama.com/download\n", + "    - To install on Linux: curl -fsSL https://ollama.com/install.sh | sh\n", + "2. Pull the LLM model used in this tutorial: \"ollama pull phi3.5\"\n", + "3. Install conda\n", + "4. Create a virtual environment, e.g. \"conda create -n smolagent python=3.12\"\n", + "5. Activate the virtual environment: \"conda activate smolagent\"\n", + "6. Install smolagents with \"pip install smolagents\" (run in the cell below)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Install smolagents" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting smolagents" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "astra-assistants 2.2.5 requires httpx<0.28.0,>=0.27.0, but you have httpx 0.28.1 which is incompatible.\n", + "e2b 1.0.1 requires httpx<0.28.0,>=0.27.0, but you have httpx 0.28.1 which is incompatible.\n", + "e2b-code-interpreter 1.0.1 requires httpx<0.28.0,>=0.20.0, but you have httpx 0.28.1 which is incompatible.\n", + "gotrue 2.9.3 requires httpx[http2]<0.28,>=0.26, but you have httpx 0.28.1 which is incompatible.\n", + "langchain-google-vertexai 1.0.10 requires httpx<0.28.0,>=0.27.0, but you have httpx 0.28.1 which is incompatible.\n", + "langchain-nvidia-ai-endpoints 0.1.6 requires pillow<11.0.0,>=10.0.0, but you have pillow 11.1.0 which is incompatible.\n", + "langflow-base 0.0.97 requires pandas==2.2.2, but you have pandas 2.2.3 which is incompatible.\n", + "langwatch 0.1.31 requires httpx<0.28.0,>=0.27.0, but you have httpx 0.28.1 which is incompatible.\n", + "postgrest 0.17.2 requires httpx[http2]<0.28,>=0.26, but you have httpx 0.28.1 which is incompatible.\n", + "storage3 0.8.2 requires httpx[http2]<0.28,>=0.26, but you have httpx 0.28.1 which is incompatible.\n", + "supabase 2.9.1 requires httpx<0.28,>=0.26, but you have httpx 0.28.1 which is incompatible.\n", + "supafunc 0.6.2 requires httpx[http2]<0.28,>=0.26, but you have httpx 0.28.1 which is incompatible.\n", + "weaviate-client 4.9.0 requires httpx<=0.27.0,>=0.25.0, but you have httpx 0.28.1 which is incompatible.\n", + "\n", + "[notice] A new release of pip is available: 24.3.1 -> 25.0.1\n", + "[notice] To update, run: python.exe -m pip install --upgrade pip\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " Downloading smolagents-1.9.2-py3-none-any.whl.metadata (14 kB)\n", + "Collecting huggingface-hub>=0.28.0 (from smolagents)\n", + " Downloading 
huggingface_hub-0.29.1-py3-none-any.whl.metadata (13 kB)\n", + "Requirement already satisfied: requests>=2.32.3 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from smolagents) (2.32.3)\n", + "Collecting rich>=13.9.4 (from smolagents)\n", + " Using cached rich-13.9.4-py3-none-any.whl.metadata (18 kB)\n", + "Collecting pandas>=2.2.3 (from smolagents)\n", + " Downloading pandas-2.2.3-cp312-cp312-win_amd64.whl.metadata (19 kB)\n", + "Requirement already satisfied: jinja2>=3.1.4 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from smolagents) (3.1.4)\n", + "Collecting pillow>=11.0.0 (from smolagents)\n", + " Downloading pillow-11.1.0-cp312-cp312-win_amd64.whl.metadata (9.3 kB)\n", + "Collecting markdownify>=0.14.1 (from smolagents)\n", + " Downloading markdownify-0.14.1-py3-none-any.whl.metadata (8.5 kB)\n", + "Collecting duckduckgo-search>=6.3.7 (from smolagents)\n", + " Downloading duckduckgo_search-7.4.4-py3-none-any.whl.metadata (17 kB)\n", + "Requirement already satisfied: python-dotenv in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from smolagents) (1.0.1)\n", + "Collecting click>=8.1.8 (from duckduckgo-search>=6.3.7->smolagents)\n", + " Using cached click-8.1.8-py3-none-any.whl.metadata (2.3 kB)\n", + "Collecting httpx>=0.28.1 (from httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents)\n", + " Using cached httpx-0.28.1-py3-none-any.whl.metadata (7.1 kB)\n", + "Requirement already satisfied: lxml>=5.3.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from duckduckgo-search>=6.3.7->smolagents) (5.3.0)\n", + "Requirement already satisfied: filelock in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from huggingface-hub>=0.28.0->smolagents) (3.16.1)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from huggingface-hub>=0.28.0->smolagents) (2024.10.0)\n", + "Requirement already satisfied: packaging>=20.9 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from huggingface-hub>=0.28.0->smolagents) (24.1)\n", + "Requirement already satisfied: pyyaml>=5.1 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from huggingface-hub>=0.28.0->smolagents) (6.0.2)\n", + "Requirement already satisfied: tqdm>=4.42.1 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from huggingface-hub>=0.28.0->smolagents) (4.66.5)\n", + "Requirement already satisfied: typing-extensions>=3.7.4.3 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from huggingface-hub>=0.28.0->smolagents) (4.12.2)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from jinja2>=3.1.4->smolagents) (3.0.2)\n", + "Requirement already satisfied: beautifulsoup4<5,>=4.9 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from markdownify>=0.14.1->smolagents) (4.12.3)\n", + "Requirement already satisfied: six<2,>=1.15 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from markdownify>=0.14.1->smolagents) (1.16.0)\n", + "Requirement already satisfied: numpy>=1.26.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from pandas>=2.2.3->smolagents) (1.26.4)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from 
pandas>=2.2.3->smolagents) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from pandas>=2.2.3->smolagents) (2024.2)\n", + "Requirement already satisfied: tzdata>=2022.7 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from pandas>=2.2.3->smolagents) (2024.2)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from requests>=2.32.3->smolagents) (3.4.0)\n", + "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from requests>=2.32.3->smolagents) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from requests>=2.32.3->smolagents) (2.2.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from requests>=2.32.3->smolagents) (2024.8.30)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from rich>=13.9.4->smolagents) (3.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from rich>=13.9.4->smolagents) (2.18.0)\n", + "Requirement already satisfied: soupsieve>1.2 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from beautifulsoup4<5,>=4.9->markdownify>=0.14.1->smolagents) (2.6)\n", + "Requirement already satisfied: colorama in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from click>=8.1.8->duckduckgo-search>=6.3.7->smolagents) (0.4.6)\n", + "Requirement already satisfied: anyio in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from httpx>=0.28.1->httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (4.6.2.post1)\n", + "Requirement already satisfied: httpcore==1.* in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from httpx>=0.28.1->httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (1.0.6)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from httpcore==1.*->httpx>=0.28.1->httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (0.14.0)\n", + "Collecting brotli (from httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents)\n", + " Downloading Brotli-1.1.0-cp312-cp312-win_amd64.whl.metadata (5.6 kB)\n", + "Requirement already satisfied: h2<5,>=3 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (4.1.0)\n", + "Collecting socksio==1.* (from httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents)\n", + " Downloading socksio-1.0.0-py3-none-any.whl.metadata (6.1 kB)\n", + "Requirement already satisfied: mdurl~=0.1 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from markdown-it-py>=2.2.0->rich>=13.9.4->smolagents) (0.1.2)\n", + "Requirement already satisfied: hyperframe<7,>=6.0 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from h2<5,>=3->httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (6.0.1)\n", + "Requirement already satisfied: hpack<5,>=4.0 in 
c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from h2<5,>=3->httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (4.0.0)\n", + "Requirement already satisfied: sniffio>=1.1 in c:\\users\\wingp\\appdata\\roaming\\python\\python312\\site-packages (from anyio->httpx>=0.28.1->httpx[brotli,http2,socks]>=0.28.1->duckduckgo-search>=6.3.7->smolagents) (1.3.1)\n", + "Downloading smolagents-1.9.2-py3-none-any.whl (101 kB)\n", + "Downloading duckduckgo_search-7.4.4-py3-none-any.whl (35 kB)\n", + "Downloading huggingface_hub-0.29.1-py3-none-any.whl (468 kB)\n", + "Downloading markdownify-0.14.1-py3-none-any.whl (11 kB)\n", + "Downloading pandas-2.2.3-cp312-cp312-win_amd64.whl (11.5 MB)\n", + " ---------------------------------------- 0.0/11.5 MB ? eta -:--:--\n", + " - -------------------------------------- 0.5/11.5 MB 2.8 MB/s eta 0:00:04\n", + " ---- ----------------------------------- 1.3/11.5 MB 3.0 MB/s eta 0:00:04\n", + " ------- -------------------------------- 2.1/11.5 MB 3.4 MB/s eta 0:00:03\n", + " ---------- ----------------------------- 2.9/11.5 MB 3.6 MB/s eta 0:00:03\n", + " ------------- -------------------------- 3.9/11.5 MB 3.9 MB/s eta 0:00:02\n", + " ---------------- ----------------------- 4.7/11.5 MB 3.8 MB/s eta 0:00:02\n", + " -------------------- ------------------- 5.8/11.5 MB 4.0 MB/s eta 0:00:02\n", + " ------------------------ --------------- 7.1/11.5 MB 4.3 MB/s eta 0:00:02\n", + " --------------------------- ------------ 7.9/11.5 MB 4.4 MB/s eta 0:00:01\n", + " -------------------------------- ------- 9.4/11.5 MB 4.5 MB/s eta 0:00:01\n", + " ------------------------------------ --- 10.5/11.5 MB 4.6 MB/s eta 0:00:01\n", + " --------------------------------------- 11.3/11.5 MB 4.6 MB/s eta 0:00:01\n", + " ---------------------------------------- 11.5/11.5 MB 4.6 MB/s eta 0:00:00\n", + "Downloading pillow-11.1.0-cp312-cp312-win_amd64.whl (2.6 MB)\n", + " ---------------------------------------- 0.0/2.6 MB ? 
eta -:--:--\n", + " --------------- ------------------------ 1.0/2.6 MB 5.0 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 2.4/2.6 MB 5.8 MB/s eta 0:00:01\n", + " ---------------------------------------- 2.6/2.6 MB 5.0 MB/s eta 0:00:00\n", + "Using cached rich-13.9.4-py3-none-any.whl (242 kB)\n", + "Using cached click-8.1.8-py3-none-any.whl (98 kB)\n", + "Using cached httpx-0.28.1-py3-none-any.whl (73 kB)\n", + "Downloading socksio-1.0.0-py3-none-any.whl (12 kB)\n", + "Downloading Brotli-1.1.0-cp312-cp312-win_amd64.whl (357 kB)\n", + "Installing collected packages: brotli, socksio, pillow, click, rich, pandas, markdownify, huggingface-hub, httpx, duckduckgo-search, smolagents\n", + " Attempting uninstall: pillow\n", + " Found existing installation: pillow 10.4.0\n", + " Uninstalling pillow-10.4.0:\n", + " Successfully uninstalled pillow-10.4.0\n", + " Attempting uninstall: click\n", + " Found existing installation: click 8.1.7\n", + " Uninstalling click-8.1.7:\n", + " Successfully uninstalled click-8.1.7\n", + " Attempting uninstall: rich\n", + " Found existing installation: rich 13.9.3\n", + " Uninstalling rich-13.9.3:\n", + " Successfully uninstalled rich-13.9.3\n", + " Attempting uninstall: pandas\n", + " Found existing installation: pandas 2.2.2\n", + " Uninstalling pandas-2.2.2:\n", + " Successfully uninstalled pandas-2.2.2\n", + " Attempting uninstall: huggingface-hub\n", + " Found existing installation: huggingface-hub 0.26.1\n", + " Uninstalling huggingface-hub-0.26.1:\n", + " Successfully uninstalled huggingface-hub-0.26.1\n", + " Attempting uninstall: httpx\n", + " Found existing installation: httpx 0.27.0\n", + " Uninstalling httpx-0.27.0:\n", + " Successfully uninstalled httpx-0.27.0\n", + " Attempting uninstall: duckduckgo-search\n", + " Found existing installation: duckduckgo_search 6.3.2\n", + " Uninstalling duckduckgo_search-6.3.2:\n", + " Successfully uninstalled duckduckgo_search-6.3.2\n", + "Successfully installed brotli-1.1.0 click-8.1.8 duckduckgo-search-7.4.4 httpx-0.28.1 huggingface-hub-0.29.1 markdownify-0.14.1 pandas-2.2.3 pillow-11.1.0 rich-13.9.4 smolagents-1.9.2 socksio-1.0.0\n" + ] + } + ], + "source": [ + "!pip install smolagents" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Test installation" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Optional\n", + "\n", + "from smolagents import HfApiModel, LiteLLMModel, TransformersModel, tool\n", + "from smolagents.agents import CodeAgent, ToolCallingAgent" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "model = LiteLLMModel(\n", + " model_id=\"ollama_chat/phi3.5\",\n", + " api_base=\"http://localhost:11434\", # replace with remote open-ai compatible server if necessary\n", + " #api_key=\"your-api-key\", # replace with API key if necessary\n", + " num_ctx=8192, # ollama default is 2048 which will often fail horribly. 8192 works for easy tasks, more is better. 
Check https://huggingface.co/spaces/NyxKrage/LLM-Model-VRAM-Calculator to calculate how much VRAM this will need for the selected model.\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "@tool\n", + "def get_weather(location: str, celsius: Optional[bool] = False) -> str:\n", + " \"\"\"\n", + " Get weather in the next days at given location.\n", + " Secretly this tool does not care about the location, it hates the weather everywhere.\n", + "\n", + " Args:\n", + " location: the location\n", + " celsius: the temperature\n", + " \"\"\"\n", + " return \"The weather is UNGODLY with torrential rains and temperatures below -10°C\"" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
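Before the agent runs below, it is worth confirming that the Ollama server is actually reachable and that the phi3.5 model has been pulled; the tool-call errors further down are much easier to debug once basic connectivity has been ruled out. The cell below is an editor-added sketch, not part of the original notebook: it assumes Ollama's standard `/api/tags` listing endpoint on the same `api_base` configured above, and the smolagents 1.9.x convention that a model instance is callable with a list of chat messages and returns an object with a `.content` attribute.

```python
import requests  # already installed; it is a dependency of smolagents

# 1) Confirm the Ollama server answers on the api_base configured above and that
#    phi3.5 shows up among the locally pulled models.
tags = requests.get("http://localhost:11434/api/tags", timeout=5).json()
print("Local models:", [m["name"] for m in tags.get("models", [])])

# 2) Send one plain chat message through the same LiteLLMModel instance the agents
#    will use (assumed calling convention for smolagents 1.9.x).
reply = model([{"role": "user", "content": [{"type": "text", "text": "Say ready."}]}])
print("phi3.5 replied:", reply.content)
```

If either step fails, fix the Ollama setup (steps 1 and 2 of the checklist above) before running the agents.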
╭──────────────────────────────────────────────────── New run ────────────────────────────────────────────────────╮\n",
+       "                                                                                                                 \n",
+       " What's the weather like in Paris?                                                                               \n",
+       "                                                                                                                 \n",
+       "╰─ LiteLLMModel - ollama_chat/phi3.5 ─────────────────────────────────────────────────────────────────────────────╯\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m╭─\u001b[0m\u001b[38;2;212;183;2m───────────────────────────────────────────────────\u001b[0m\u001b[38;2;212;183;2m \u001b[0m\u001b[1;38;2;212;183;2mNew run\u001b[0m\u001b[38;2;212;183;2m \u001b[0m\u001b[38;2;212;183;2m───────────────────────────────────────────────────\u001b[0m\u001b[38;2;212;183;2m─╮\u001b[0m\n", + "\u001b[38;2;212;183;2m│\u001b[0m \u001b[38;2;212;183;2m│\u001b[0m\n", + "\u001b[38;2;212;183;2m│\u001b[0m \u001b[1mWhat's the weather like in Paris?\u001b[0m \u001b[38;2;212;183;2m│\u001b[0m\n", + "\u001b[38;2;212;183;2m│\u001b[0m \u001b[38;2;212;183;2m│\u001b[0m\n", + "\u001b[38;2;212;183;2m╰─\u001b[0m\u001b[38;2;212;183;2m LiteLLMModel - ollama_chat/phi3.5 \u001b[0m\u001b[38;2;212;183;2m────────────────────────────────────────────────────────────────────────────\u001b[0m\u001b[38;2;212;183;2m─╯\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 1 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m1\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Error in generating tool call with model:\n",
+       "'NoneType' object is not iterable\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mError in generating tool call with model:\u001b[0m\n", + "\u001b[1;31m'NoneType'\u001b[0m\u001b[1;31m object is not iterable\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 0: Duration 43.02 seconds| Input tokens: 1,339 | Output tokens: 28]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 0: Duration 43.02 seconds| Input tokens: 1,339 | Output tokens: 28]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 2 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m2\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Error in generating tool call with model:\n",
+       "'NoneType' object is not iterable\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mError in generating tool call with model:\u001b[0m\n", + "\u001b[1;31m'NoneType'\u001b[0m\u001b[1;31m object is not iterable\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 1: Duration 5.91 seconds| Input tokens: 2,510 | Output tokens: 51]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 1: Duration 5.91 seconds| Input tokens: 2,510 | Output tokens: 51]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 3 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m3\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Error in generating tool call with model:\n",
+       "'NoneType' object is not iterable\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mError in generating tool call with model:\u001b[0m\n", + "\u001b[1;31m'NoneType'\u001b[0m\u001b[1;31m object is not iterable\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 2: Duration 3.63 seconds| Input tokens: 3,953 | Output tokens: 79]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 2: Duration 3.63 seconds| Input tokens: 3,953 | Output tokens: 79]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 4 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m4\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Error in generating tool call with model:\n",
+       "'NoneType' object is not iterable\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mError in generating tool call with model:\u001b[0m\n", + "\u001b[1;31m'NoneType'\u001b[0m\u001b[1;31m object is not iterable\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 3: Duration 5.53 seconds| Input tokens: 5,216 | Output tokens: 109]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 3: Duration 5.53 seconds| Input tokens: 5,216 | Output tokens: 109]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 5 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m5\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Error in generating tool call with model:\n",
+       "'NoneType' object is not iterable\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mError in generating tool call with model:\u001b[0m\n", + "\u001b[1;31m'NoneType'\u001b[0m\u001b[1;31m object is not iterable\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 4: Duration 4.10 seconds| Input tokens: 6,763 | Output tokens: 143]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 4: Duration 4.10 seconds| Input tokens: 6,763 | Output tokens: 143]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 6 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m6\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Error in generating tool call with model:\n",
+       "'NoneType' object is not iterable\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mError in generating tool call with model:\u001b[0m\n", + "\u001b[1;31m'NoneType'\u001b[0m\u001b[1;31m object is not iterable\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 5: Duration 6.11 seconds| Input tokens: 8,118 | Output tokens: 166]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 5: Duration 6.11 seconds| Input tokens: 8,118 | Output tokens: 166]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Reached max steps.\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;31mReached max steps.\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 6: Duration 6.11 seconds| Input tokens: 8,520 | Output tokens: 458]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 6: Duration 6.11 seconds| Input tokens: 8,520 | Output tokens: 458]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ToolCallingAgent: I apologize for the confusion earlier due to technical issues with generating predictions directly from my side. However, I can certainly share some general information about it! Please note that real-time and accurate weather updates would require accessing a live data source or service:\n", + "\n", + "As of now, since we don't have an immediate internet connection at this platform for fetching the latest details, here is how you could find out the current conditions in Paris using online tools such as AccuWeather, The Weather Channel, or BBC weather. Here are some steps to follow based on your preferred method:\n", + "\n", + "1. Visit any of these websites and enter 'Paris' into their search fields. They should provide a reliable forecast for you therein.\n", + "2. You could also use voice-activated assistants like Siri, Google Assistant or Alexa by simply saying \"Hey Siri/Google Assistant, what’s the weather in Paris today?\" \n", + "3. Alternatively, download and install an app from the App Store (iOS) or Play Store (Android), then search for 'Weather' to find local forecasts like DarkSky Weather or AccuWeather apps which would provide detailed information including temperature, humidity, wind conditions etc., specifics of Paris.\n", + "\n", + "Remember weather can change rapidly so it’s always good practice checking a few minutes before heading out!\n" + ] + } + ], + "source": [ + "agent = ToolCallingAgent(tools=[get_weather], model=model)\n", + "\n", + "print(\"ToolCallingAgent:\", agent.run(\"What's the weather like in Paris?\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
╭──────────────────────────────────────────────────── New run ────────────────────────────────────────────────────╮\n",
+       "                                                                                                                 \n",
+       " What's the weather like in Paris?                                                                               \n",
+       "                                                                                                                 \n",
+       "╰─ LiteLLMModel - ollama_chat/phi3.5 ─────────────────────────────────────────────────────────────────────────────╯\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m╭─\u001b[0m\u001b[38;2;212;183;2m───────────────────────────────────────────────────\u001b[0m\u001b[38;2;212;183;2m \u001b[0m\u001b[1;38;2;212;183;2mNew run\u001b[0m\u001b[38;2;212;183;2m \u001b[0m\u001b[38;2;212;183;2m───────────────────────────────────────────────────\u001b[0m\u001b[38;2;212;183;2m─╮\u001b[0m\n", + "\u001b[38;2;212;183;2m│\u001b[0m \u001b[38;2;212;183;2m│\u001b[0m\n", + "\u001b[38;2;212;183;2m│\u001b[0m \u001b[1mWhat's the weather like in Paris?\u001b[0m \u001b[38;2;212;183;2m│\u001b[0m\n", + "\u001b[38;2;212;183;2m│\u001b[0m \u001b[38;2;212;183;2m│\u001b[0m\n", + "\u001b[38;2;212;183;2m╰─\u001b[0m\u001b[38;2;212;183;2m LiteLLMModel - ollama_chat/phi3.5 \u001b[0m\u001b[38;2;212;183;2m────────────────────────────────────────────────────────────────────────────\u001b[0m\u001b[38;2;212;183;2m─╯\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Step 1 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[38;2;212;183;2m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ \u001b[0m\u001b[1mStep \u001b[0m\u001b[1;36m1\u001b[0m\u001b[38;2;212;183;2m ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Executing parsed code: ──────────────────────────────────────────────────────────────────────────────────────── \n",
+       "  weather_in_paris = get_weather(location=\"Paris\", celsius=False)  # Setting 'celsius' to False indicates we want  \n",
+       "  the temperature in degrees Fahrenheit.                                                                           \n",
+       "  final_answer(f\"The weather in Paris is currently {weather_in_paris}.\")                                           \n",
+       " ───────────────────────────────────────────────────────────────────────────────────────────────────────────────── \n",
+       "
\n" + ], + "text/plain": [ + " ─ \u001b[1mExecuting parsed code:\u001b[0m ──────────────────────────────────────────────────────────────────────────────────────── \n", + " \u001b[38;2;248;248;242;48;2;39;40;34mweather_in_paris\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m=\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34mget_weather\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m(\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34mlocation\u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m=\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mParis\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34mcelsius\u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m=\u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mFalse\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m)\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;149;144;119;48;2;39;40;34m# Setting 'celsius' to False indicates we want\u001b[0m \n", + " \u001b[38;2;149;144;119;48;2;39;40;34mthe temperature in degrees Fahrenheit.\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \n", + " \u001b[38;2;248;248;242;48;2;39;40;34mfinal_answer\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m(\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mf\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mThe weather in Paris is currently \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m{\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34mweather_in_paris\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m}\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m.\u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m)\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \n", + " ───────────────────────────────────────────────────────────────────────────────────────────────────────────────── \n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
Out - Final answer: The weather in Paris is currently The weather is UNGODLY with torrential rains and temperatures\n",
+       "below -10°C.\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;38;2;212;183;2mOut - Final answer: The weather in Paris is currently The weather is UNGODLY with torrential rains and temperatures\u001b[0m\n", + "\u001b[1;38;2;212;183;2mbelow -10°C.\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
[Step 0: Duration 83.01 seconds| Input tokens: 2,490 | Output tokens: 166]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[2m[Step 0: Duration 83.01 seconds| Input tokens: 2,490 | Output tokens: 166]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CodeAgent: The weather in Paris is currently The weather is UNGODLY with torrential rains and temperatures below -10°C.\n" + ] + } + ], + "source": [ + "agent = CodeAgent(tools=[get_weather], model=model)\n", + "\n", + "print(\"CodeAgent:\", agent.run(\"What's the weather like in Paris?\"))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "smolagent", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}