{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "Import the modules of smolagent" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from typing import Optional\n", "\n", "from smolagents import HfApiModel, LiteLLMModel, TransformersModel, tool\n", "from smolagents.agents import CodeAgent, ToolCallingAgent\n", "from smolagents import DuckDuckGoSearchTool" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Get the LLM model from ollama" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "model = LiteLLMModel(\n", " model_id=\"ollama_chat/llama3.1\",\n", " api_base=\"http://192.168.0.117:11434\", # replace with remote open-ai compatible server if necessary\n", " #api_key=\"your-api-key\", # replace with API key if necessary\n", " #num_ctx=8192, # ollama default is 2048 which will often fail horribly. 8192 works for easy tasks, more is better. Check https://huggingface.co/spaces/NyxKrage/LLM-Model-VRAM-Calculator to calculate how much VRAM this will need for the selected model.\n", " )" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Assign the tools and model to agent" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent = ToolCallingAgent(tools=[DuckDuckGoSearchTool()], model=model)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Create task for agent (prompt)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# prompt = \"What is the capital of France?\"\n", "# promt = \"What is the capital of Germany?\"\n", "prompt = \"who is the current world champion in football?\"" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Run Agent" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent.run(prompt)" ] } ], "metadata": { "kernelspec": { "display_name": "smolagent", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.9" } }, "nbformat": 4, "nbformat_minor": 2 }