diff --git a/quickstarts/Function_calling.ipynb b/quickstarts/Function_calling.ipynb index 29c40fbf4..a34e99da5 100644 --- a/quickstarts/Function_calling.ipynb +++ b/quickstarts/Function_calling.ipynb @@ -11,7 +11,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 1, "metadata": { "cellView": "form", "id": "tuOe1ymfHZPu" @@ -57,7 +57,7 @@ "source": [ " Function calling lets developers create a description of a function in their code, then pass that description to a language model in a request. The response from the model includes the name of a function that matches the description and the arguments to call it with. Function calling lets you use functions as tools in generative AI applications, and you can define more than one function within a single request.\n", "\n", - "This notebook provides code examples to help you get started. The documentation's [quickstart](https://ai.google.dev/tutorials/function_calling_python_quickstart) is also a good place to start understanding function calling." + "This notebook provides code examples to help you get started. The documentation's [quickstart](https://ai.google.dev/gemini-api/docs/function-calling#python) is also a good place to start understanding function calling." 
] }, { @@ -80,32 +80,26 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": { - "id": "9OEoeosRTv-5" + "id": "9OEoeosRTv-5", + "outputId": "bcaac979-fab0-41c2-abed-0059bf352aed", + "colab": { + "base_uri": "https://localhost:8080/" + } }, "outputs": [ { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Note: you may need to restart the kernel to use updated packages.\n" + "\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/159.7 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m159.7/159.7 kB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25h" ] } ], "source": [ - "%pip install -qU 'google-genai'" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "metadata": { - "id": "TS9l5igubpHO" - }, - "outputs": [], - "source": [ - "from google import genai" + "%pip install -qU 'google-genai>=1.0.0'" ] }, { @@ -121,12 +115,13 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 3, "metadata": { "id": "ab9ASynfcIZn" }, "outputs": [], "source": [ + "from google import genai\n", "from google.colab import userdata\n", "\n", "GOOGLE_API_KEY = userdata.get(\"GOOGLE_API_KEY\")\n", @@ -146,7 +141,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": { "id": "sEK4ZDVGqJ5H" }, @@ -161,7 +156,7 @@ "id": "3f383614ec30" }, "source": [ - "## Function calling basics" + "## Setting up Functions as Tools" ] }, { @@ -172,294 +167,519 @@ "source": [ "To use function calling, pass a list of functions to the `tools` parameter when creating a [`GenerativeModel`](https://ai.google.dev/api/python/google/generativeai/GenerativeModel). 
The model uses the function name, docstring, parameters, and parameter type annotations to decide if it needs the function to best answer a prompt.\n", "\n", - "> Important: The SDK converts function parameter type annotations to a format the API understands (`genai.types.FunctionDeclaration`). The API only supports a limited selection of parameter types, and the Python SDK's automatic conversion only supports a subset of that: `AllowedTypes = int | float | bool | str | list['AllowedTypes'] | dict`" + "> Important: The SDK converts function parameter type annotations to a format the API understands (`genai.types.FunctionDeclaration`). The API only supports a limited selection of parameter types, and the Python SDK's automatic conversion only supports a subset of that: `AllowedTypes = int | float | bool | str | list['AllowedTypes'] | dict`\n", + "\n", + "\n", + "**Example: Lighting System Functions**\n", + "\n", + "Here are 3 functions controlling a hypothetical lighting system. Note the docstrings and type hints." ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": { - "id": "42b27b02d2f5" + "id": "C8J_H1hSp4m-" }, "outputs": [], "source": [ - "from google.genai import types\n", - "def add(a: float, b: float):\n", - " \"\"\"returns a + b.\"\"\"\n", - " return a + b\n", - "\n", - "\n", - "def subtract(a: float, b: float):\n", - " \"\"\"returns a - b.\"\"\"\n", - " return a - b\n", - "\n", - "\n", - "def multiply(a: float, b: float):\n", - " \"\"\"returns a * b.\"\"\"\n", - " return a * b\n", + "def enable_lights():\n", + " \"\"\"Turn on the lighting system.\"\"\"\n", + " print(\"LIGHTBOT: Lights enabled.\")\n", "\n", "\n", - "def divide(a: float, b: float):\n", - " \"\"\"returns a / b.\"\"\"\n", - " return a / b\n", + "def set_light_color(rgb_hex: str):\n", + " \"\"\"Set the light color. 
Lights must be enabled for this to work.\"\"\"\n", + " print(f\"LIGHTBOT: Lights set to {rgb_hex}.\")\n", "\n", + "def stop_lights():\n", + " \"\"\"Stop flashing lights.\"\"\"\n", + " print(\"LIGHTBOT: Lights turned off.\")\n", "\n", - "operation_tools = [add, subtract, multiply, divide]" + "light_controls = [enable_lights, set_light_color, stop_lights]\n", + "instruction = \"\"\"\n", + " You are a helpful lighting system bot. You can turn\n", + " lights on and off, and you can set the color. Do not perform any\n", + " other tasks.\n", + "\"\"\"" ] }, { "cell_type": "markdown", "metadata": { - "id": "UzUgtaY99BTg" + "id": "Ry0JsK405KwS" }, "source": [ - "## Automatic function calling" + "## Basic Function Calling with Chat" ] }, { "cell_type": "markdown", "metadata": { - "id": "d5fd91032a1e" + "id": "9l4wdq8b5Nuy" }, "source": [ - "Function calls naturally fit in to [multi-turn chats](https://ai.google.dev/gemini-api/docs/text-generation?lang=python#chat) as they capture a back and forth interaction between the user and model. The Python SDK's [`Chat` Session](https://googleapis.github.io/python-genai/index.html#chats) is a great interface for chats because handles the conversation history for you, and using the parameter `automatic_function_calling` (enabled by default) simplifies function calling even further:" + "Function calls naturally fit into multi-turn conversations. The Python SDK's `ChatSession` (created via `client.chats.create(...)`) is ideal for this, as it automatically handles conversation history.\n", + "\n", + "Furthermore, `ChatSession` simplifies function calling execution via its `automatic_function_calling` feature (enabled by default), which will be explored more later. For now, let's see a basic interaction where the model decides to call a function."
] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": { - "id": "d3b91c855257" + "id": "-yuQ2gCY5ujD", + "outputId": "fd4db9a6-216c-46b6-d94c-2c2cde571564", + "colab": { + "base_uri": "https://localhost:8080/" + } }, - "outputs": [], + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "LIGHTBOT: Lights enabled.\n", + "OK. I've turned the lights on.\n", + "\n" + ] + } + ], "source": [ "chat = client.chats.create(\n", - " model = MODEL_ID,\n", - " config = {\n", - " \"tools\": operation_tools,\n", - " \"automatic_function_calling\": {\"disable\": False} # This line is not needed as automatic_function_calling is enabled by default\n", + " model=MODEL_ID,\n", + " config={\n", + " \"tools\": light_controls,\n", + " \"system_instruction\": instruction,\n", + " # automatic_function_calling defaults to enabled\n", " }\n", - ")" + ")\n", + "\n", + "response = chat.send_message(\"It's awful dark in here...\")\n", + "\n", + "print(response.text)" ] }, { "cell_type": "markdown", "metadata": { - "id": "1481a6159399" + "id": "q1UsMG3FqYrC" }, "source": [ - "With automatic function calling enabled, `Chat.send_message` automatically calls your function if the model asks it to.\n", + "## Examining Function Calls and Execution History\n", + "\n", + "To understand what happened in the background, you can examine the chat history.\n", + "\n", + "The `Chat.history` property stores a chronological record of the conversation between the user and the Gemini model. You can get the history using `Chat.get_history()`. 
Each turn in the conversation is represented by a `genai.types.Content` object, which contains the following information:\n", "\n", - "In the following example, the result appears to simply be a text response containing the correct answer:" + "**Role**: Identifies whether the content originated from the \"user\" or the \"model\".\n", + "\n", + "**Parts**: A list of genai.types.Part objects that represent individual components of the message. With a text-only model, these parts can be:\n", + "\n", + "* **Text**: Plain text messages.\n", + "* **Function Call (genai.types.FunctionCall)**: A request from the model to execute a specific function with provided arguments.\n", + "* **Function Response (genai.types.FunctionResponse)**: The result returned by the user after executing the requested function.\n" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": { - "id": "81d8def3d865" + "id": "SBNAqSexqZlZ", + "outputId": "4c4b0431-9a94-48cc-ab5b-6a5e1363029e", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 335 + } }, "outputs": [ { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "It's awful dark in here..." 
+ }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={} name='enable_lights' }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='enable_lights' response={'result': None} }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", "data": { "text/plain": [ - "'That would be 2508 mittens in total.'" - ] + "" + ], + "text/markdown": "OK. 
I've turned the lights on.\n" }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--------------------------------------------------------------------------------\n" + ] } ], "source": [ - "response = chat.send_message(\n", - " \"I have 57 cats, each owns 44 mittens, how many mittens is that in total?\"\n", - ")\n", - "response.text" + "from IPython.display import Markdown, display\n", + "\n", + "def print_history(chat):\n", + " for content in chat.get_history():\n", + " display(Markdown(\"###\" + content.role + \":\"))\n", + " for part in content.parts:\n", + " if part.text:\n", + " display(Markdown(part.text))\n", + " if part.function_call:\n", + " print(\"Function call: {\", part.function_call, \"}\")\n", + " if part.function_response:\n", + " print(\"Function response: {\", part.function_response, \"}\")\n", + " print(\"-\" * 80)\n", + "\n", + "print_history(chat)" ] }, { - "cell_type": "code", - "execution_count": 7, + "cell_type": "markdown", "metadata": { - "id": "951c0f83f72e" + "id": "CS84-2yG7A--" }, - "outputs": [ - { - "data": { - "text/plain": [ - "2508" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ - "57 * 44" + "This history shows the flow:\n", + "\n", + "1. **User**: Sends the message.\n", + "\n", + "2. **Model**: Responds not with text, but with a `FunctionCall` requesting `enable_lights`.\n", + "\n", + "3. **User (SDK)**: The `ChatSession` automatically executes `enable_lights()` because `automatic_function_calling` is enabled. It sends the result back as a `FunctionResponse`.\n", + "\n", + "4. **Model**: Uses the `FunctionResponse` (here `{'result': None}`, since `enable_lights` only prints a confirmation and returns nothing) to formulate the final text response."
] }, { "cell_type": "markdown", "metadata": { - "id": "7731e35f2383" + "id": "CsCZArT47p5T" }, "source": [ - "However, by examining the chat history, you can see the flow of the conversation and how function calls are integrated within it.\n", + "## Automatic Function Execution (Python SDK Feature)\n", + "\n", + "As demonstrated above, the `ChatSession` in the Python SDK has a powerful feature called Automatic Function Execution. When enabled (which it is by default), if the model responds with a FunctionCall, the SDK will:\n", + "\n", + "1. Find the corresponding Python function in the provided `tools`.\n", "\n", - "The `Chat.history` property stores a chronological record of the conversation between the user and the Gemini model. You can get the history using `Chat.get_history()`. Each turn in the conversation is represented by a [`genai.types.Content`](https://googleapis.github.io/python-genai/genai.html#genai.types.Content) object, which contains the following information:\n", + "2. Execute the function with the arguments provided by the model.\n", "\n", - "* **Role**: Identifies whether the content originated from the \"user\" or the \"model\".\n", - "* **Parts**: A list of [`genai.types.Part`](https://googleapis.github.io/python-genai/genai.html#genai.types.Part) objects that represent individual components of the message. With a text-only model, these parts can be:\n", - " * **Text**: Plain text messages.\n", - " * **Function Call** ([`genai.types.FunctionCall`](https://googleapis.github.io/python-genai/genai.html#genai.types.FunctionCall)): A request from the model to execute a specific function with provided arguments.\n", - " * **Function Response** ([`genai.types.FunctionResponse`](https://googleapis.github.io/python-genai/genai.html#genai.types.FunctionResponse)): The result returned by the user after executing the requested function.\n", + "3. 
Send the function's return value back to the model in a `FunctionResponse`.\n", "\n", - " In the previous example with the mittens calculation, the history shows the following sequence:\n", + "4. Return only the model's final response (usually text) to your code.\n", "\n", - "1. **User**: Asks the question about the total number of mittens.\n", - "1. **Model**: Determines that the multiply function is helpful and sends a FunctionCall request to the user.\n", - "1. **User**: The `Chat` session automatically executes the function (due to `_automatic_function_calling` being set) and sends back a `FunctionResponse` with the calculated result.\n", - "1. **Model**: Uses the function's output to formulate the final answer and presents it as a text response." + "This significantly simplifies the workflow for common use cases.\n", + "\n", + "**Example: Math Operations**" ] }, { "cell_type": "code", "execution_count": 8, "metadata": { - "id": "9f7eff1e8e60" + "id": "r1FnK3EB8jgQ", + "outputId": "5e9ba6bb-7436-4952-c2c8-9aa823977d55", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "That would be 2508 mittens in total.\n" + ] + } + ], + "source": [ + "from google.genai import types # Ensure types is imported\n", + "\n", + "def add(a: float, b: float):\n", + " \"\"\"returns a + b.\"\"\"\n", + " return a + b\n", + "\n", + "def subtract(a: float, b: float):\n", + " \"\"\"returns a - b.\"\"\"\n", + " return a - b\n", + "\n", + "def multiply(a: float, b: float):\n", + " \"\"\"returns a * b.\"\"\"\n", + " return a * b\n", + "\n", + "def divide(a: float, b: float):\n", + " \"\"\"returns a / b.\"\"\"\n", + " if b == 0:\n", + " return \"Cannot divide by zero.\"\n", + " return a / b\n", + "\n", + "operation_tools = [add, subtract, multiply, divide]\n", + "\n", + "chat = client.chats.create(\n", + " model=MODEL_ID,\n", + " config={\n", + " \"tools\": operation_tools,\n", + " 
\"automatic_function_calling\": {\"disable\": False} # Enabled by default\n", + " }\n", + ")\n", + "\n", + "response = chat.send_message(\n", + " \"I have 57 cats, each owns 44 mittens, how many mittens is that in total?\"\n", + ")\n", + "\n", + "print(response.text)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "id": "cU2TO5-S8tmp", + "outputId": "e4166414-ce7c-425e-a7b9-770a5c385dd0", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 335 + } }, "outputs": [ { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "I have 57 cats, each owns 44 mittens, how many mittens is that in total?" - ], "text/plain": [ "" - ] + ], + "text/markdown": "I have 57 cats, each owns 44 mittens, how many mittens is that in total?" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "Function call: { id=None args={'a': 57, 'b': 44} name='multiply' }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "Function 
response: { id=None name='multiply' response={'result': 2508} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "That would be 2508 mittens in total." - ], "text/plain": [ "" - ] + ], + "text/markdown": "That would be 2508 mittens in total." }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "--------------------------------------------------------------------------------\n" ] } ], "source": [ - "from IPython.display import Markdown, display\n", - "\n", - "for content in chat.get_history():\n", - " display(Markdown(\"###\" + content.role + \":\"))\n", - " for part in content.parts:\n", - " if part.text:\n", - " display(Markdown(part.text))\n", - " if part.function_call:\n", - " print(\"Function call: {\", part.function_call, \"}\")\n", - " if part.function_response:\n", - " print(\"Function response: {\", part.function_response, \"}\")\n", - " print(\"-\" * 80)\n" + "print_history(chat)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "a8A6qJ668ywT" + }, + "source": [ + "Automatic execution handled the `multiply` call seamlessly." ] }, { "cell_type": "markdown", "metadata": { - "id": "2471fd72f05e" + "id": "1BzsV6MxLnZD" + }, + "source": [ + "## Automatic Function Schema Declaration\n", + "\n", + "A key convenience of the Python SDK is its ability to automatically generate the required `FunctionDeclaration` schema from your Python functions. 
It inspects:\n", + "\n", + "- **Function Name**: (`func.__name__`)\n", + "\n", + "- **Docstring**: Used for the function's description.\n", + "\n", + "- **Parameters**: Names and type annotations (`int`, `str`, `float`, `bool`, `list`, `dict`). Docstrings for parameters (if using specific formats like Google style) can also enhance the description.\n", + "\n", + "- **Return Type Annotation**: Although not strictly used by the model for deciding which function to call, it's good practice.\n", + "\n", + "You generally don't need to create `FunctionDeclaration` objects manually when using Python functions directly as tools.\n", + "\n", + "However, you can generate the schema explicitly using `genai.types.FunctionDeclaration.from_callable` if you need to inspect it, modify it, or use it in scenarios where you don't have the Python function object readily available." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "id": "qrYRieAuL2hs", + "outputId": "666c1cec-42e2-4e36-ed7b-0ea42ec06b8d", + "colab": { + "base_uri": "https://localhost:8080/" + } }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{\n", + " \"description\": \"Set the light color. Lights must be enabled for this to work.\",\n", + " \"name\": \"set_light_color\",\n", + " \"parameters\": {\n", + " \"properties\": {\n", + " \"rgb_hex\": {\n", + " \"type\": \"STRING\"\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"rgb_hex\"\n", + " ],\n", + " \"type\": \"OBJECT\"\n", + " }\n", + "}\n" + ] + } + ], "source": [ - "In general the state diagram is:\n", + "import json\n", "\n", - "\"The\n", + "set_color_declaration = types.FunctionDeclaration.from_callable(\n", + " callable = set_light_color,\n", + " client = client\n", + ")\n", "\n", - "The model can respond with multiple function calls before returning a text response, and function calls come before the text response." 
+ "print(json.dumps(set_color_declaration.to_json_dict(), indent=4))" ] }, { @@ -477,7 +697,7 @@ "id": "9610f3465a69" }, "source": [ - "For more control, you can process [`genai.types.FunctionCall`](https://googleapis.github.io/python-genai/genai.html#genai.types.FunctionCall) requests from the model yourself. This would be the case if:\n", + "For more control, or if automatic function calling is not available, you can process [`genai.types.FunctionCall`](https://googleapis.github.io/python-genai/genai.html#genai.types.FunctionCall) requests from the model yourself. This would be the case if:\n", "\n", "- You use a `Chat` with the default `\"automatic_function_calling\": {\"disable\": False}`.\n", "- You use [`Client.model.generate_content`](https://googleapis.github.io/python-genai/genai.html#genai.types.) (and manage the chat history yourself)." @@ -489,12 +709,14 @@ "id": "34ffab0bf365" }, "source": [ + "**Example: Movies**\n", + "\n", "The following example is a rough equivalent of the [function calling single-turn curl sample](https://ai.google.dev/docs/function_calling#function-calling-single-turn-curl-sample) in Python. 
It uses functions that return (mock) movie playtime information, possibly from a hypothetical API:" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 11, "metadata": { "id": "46ba0fa3d09a" }, @@ -546,33 +768,42 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 17, "metadata": { - "id": "5e3b9c84d883" + "id": "5e3b9c84d883", + "outputId": "7fcc3996-258e-4437-e93f-4efdfd94e593", + "colab": { + "base_uri": "https://localhost:8080/" + } }, "outputs": [ { - "data": { - "text/plain": [ - "[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=FunctionCall(id=None, args={'location': 'Mountain View, CA', 'movie': 'Barbie'}, name='find_theaters'), function_response=None, inline_data=None, text=None)]" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" + "output_type": "stream", + "name": "stdout", + "text": [ + "{\n", + " \"function_call\": {\n", + " \"args\": {\n", + " \"location\": \"Mountain View, CA\",\n", + " \"movie\": \"Barbie movie\"\n", + " },\n", + " \"name\": \"find_theaters\"\n", + " }\n", + "}\n" + ] } ], "source": [ "response = client.models.generate_content(\n", " model=MODEL_ID,\n", - " contents=\"Which theaters in Mountain View show the Barbie movie?\",\n", + " contents=\"Which theaters in Mountain View, CA show the Barbie movie?\",\n", " config = {\n", " \"tools\": theater_functions,\n", - " \"automatic_function_calling\": {\"disable\": True} # This line is not needed as automatic_function_calling is enabled by default\n", + " \"automatic_function_calling\": {\"disable\": True}\n", " }\n", ")\n", "\n", - "response.candidates[0].content.parts" + "print(json.dumps(response.candidates[0].content.parts[0].to_json_dict(), indent=4))" ] }, { @@ -596,14 +827,18 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 19, "metadata": { - "id": "rjkZ8MA00Coc" + "id": "rjkZ8MA00Coc", + "outputId": 
"9786d483-0efb-4a36-c163-0a19b386b4b7", + "colab": { + "base_uri": "https://localhost:8080/" + } }, "outputs": [ { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "['Googleplex 16', 'Android Theatre']\n" ] @@ -639,16 +874,20 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 20, "metadata": { - "id": "xr13VGnJAgZv" + "id": "xr13VGnJAgZv", + "outputId": "b743e2f2-39d4-48f9-a87e-c6a9089016a1", + "colab": { + "base_uri": "https://localhost:8080/" + } }, "outputs": [ { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "The Barbie movie is currently playing at the Googleplex 16 and Android Theatre in Mountain View.\n" + "The Barbie movie is playing at Googleplex 16 and Android Theatre in Mountain View.\n" ] } ], @@ -675,679 +914,1271 @@ { "cell_type": "markdown", "metadata": { - "id": "94a52c498cb8" + "id": "8ZFiMVth9Kjb" + }, + "source": [ + "This demonstrates the manual workflow: call, check, execute, respond, call again." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "EuwKoNIhGBJN" + }, + "source": [ + "## Parallel function calls\n", + "\n", + "The Gemini API can call multiple functions in a single turn. This caters for scenarios where there are multiple function calls that can take place independently to complete a task.\n", + "\n", + "First set the tools up. Unlike the movie example above, these functions do not require input from each other to be called so they should be good candidates for parallel calling." + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "id": "cJ-mSixWGqLv" }, + "outputs": [], "source": [ - "## Function calling chain\n", + "def power_disco_ball(power: bool) -> bool:\n", + " \"\"\"Powers the spinning disco ball.\"\"\"\n", + " print(f\"Disco ball is {'spinning!' 
if power else 'stopped.'}\")\n", + " return True\n", + "\n", + "def start_music(energetic: bool, loud: bool, bpm: int) -> str:\n", + " \"\"\"Play some music matching the specified parameters.\n", + "\n", + " Args:\n", + " energetic: Whether the music is energetic or not.\n", + " loud: Whether the music is loud or not.\n", + " bpm: The beats per minute of the music.\n", + "\n", + " Returns: The name of the song being played.\n", + " \"\"\"\n", + " print(f\"Starting music! {energetic=} {loud=}, {bpm=}\")\n", + " return \"Never gonna give you up.\"\n", + "\n", + "\n", + "def dim_lights(brightness: float) -> bool:\n", + " \"\"\"Dim the lights.\n", + "\n", + " Args:\n", + " brightness: The brightness of the lights, 0.0 is off, 1.0 is full.\n", + " \"\"\"\n", + " print(f\"Lights are now set to {brightness:.0%}\")\n", + " return True\n", "\n", - "The model is not limited to one function call, it can chain them until it finds the right answer." + "house_fns = [power_disco_ball, start_music, dim_lights]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zlrmXN7fxQi0" + }, + "source": [ + "Now call the model with an instruction that could use all of the specified tools." ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 22, "metadata": { - "id": "809deb79f194" + "id": "21ecYHLgIsCl", + "outputId": "ab0b8f88-9937-4910-812b-420920417dac", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + } }, "outputs": [ { + "output_type": "stream", + "name": "stdout", + "text": [ + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! 
energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n", + "Starting music! energetic=True loud=True, bpm=120\n", + "Lights are now set to 50%\n", + "Disco ball is spinning!\n" + ] + }, + { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Which comedy movies are shown tonight (01/01/2025) in Mountain view, in which cinema and at what time?" - ], "text/plain": [ "" - ] + ], + "text/markdown": "Turn this place into a party!" 
}, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'bpm': 120, 'energetic': True, 'loud': True} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Okay, I can help you with that. First, I need to find the comedy movies playing in Mountain View, CA tonight. 
Then, for each movie, I will find the theaters showing it and finally get the showtimes for January 1st, 2025.\n", - "\n", - "Let's start by finding the comedy movies.\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'location': 'Mountain View, CA', 'description': 'comedy'} name='find_movies' }\n", + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='find_movies' response={'result': ['Barbie', 'Oppenheimer']} }\n", + "Function call: { id=None args={'loud': True, 'energetic': True, 'bpm': 120} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", 
+ "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "According to the search results, \"Barbie\" and \"Oppenheimer\" are playing in Mountain View and are categorized as comedy movies.\n", - "\n", - "Now let me find the theaters that are showing \"Barbie\" in Mountain View tonight.\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'movie': 'Barbie', 'location': 'Mountain View, CA'} name='find_theaters' }\n", + "Function call: { id=None args={'bpm': 120, 'energetic': True, 'loud': True} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='find_theaters' response={'result': ['Googleplex 16', 'Android Theatre']} }\n", + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", 
"--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'loud': True, 'bpm': 120, 'energetic': True} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Now I will find the showtimes for \"Barbie\" in Googleplex 16 for tonight, January 1st, 2025.\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'location': 'Mountain View, CA', 'theater': 'Googleplex 16', 'date': '01/01/2025', 'movie': 'Barbie'} name='get_showtimes' }\n", + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='get_showtimes' response={'result': 
['10:00', '11:00']} }\n", + "Function call: { id=None args={'bpm': 120, 'loud': True, 'energetic': True} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "The movie \"Barbie\" is playing at Googleplex 16 at 10:00 and 11:00 PM tonight.\n", - "\n", - "Now let me find the showtimes for \"Barbie\" in Android Theatre for tonight, January 1st, 2025.\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'movie': 'Barbie', 'location': 'Mountain View, CA', 'date': '01/01/2025', 'theater': 'Android Theatre'} name='get_showtimes' }\n", + "Function call: { id=None args={'loud': True, 'energetic': True, 'bpm': 120} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", 
"--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='get_showtimes' response={'result': ['10:00', '11:00']} }\n", + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'energetic': True, 'bpm': 120, 'loud': True} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "The movie \"Barbie\" is playing at Android Theatre at 10:00 and 11:00 PM tonight.\n", - "\n", - "Now let me find the theaters that are showing \"Oppenheimer\" in Mountain View tonight.\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'movie': 
'Oppenheimer', 'location': 'Mountain View, CA'} name='find_theaters' }\n", + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='find_theaters' response={'result': ['Googleplex 16', 'Android Theatre']} }\n", + "Function call: { id=None args={'loud': True, 'energetic': True, 'bpm': 120} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Now I will find the showtimes for \"Oppenheimer\" in Googleplex 16 for tonight, 
January 1st, 2025.\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'theater': 'Googleplex 16', 'date': '01/01/2025', 'location': 'Mountain View, CA', 'movie': 'Oppenheimer'} name='get_showtimes' }\n", + "Function call: { id=None args={'energetic': True, 'bpm': 120, 'loud': True} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='get_showtimes' response={'result': ['10:00', '11:00']} }\n", + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'energetic': True, 'loud': True, 'bpm': 120} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { 
id=None args={'power': True} name='power_disco_ball' }\n", + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Okay, here are the comedy movies the API found playing tonight, January 1st, 2025, in Mountain View, CA, along with their showtimes:\n", - "\n", - "Based on the search, the movies \"Barbie\" and \"Oppenheimer\" were found under the \"comedy\" description.\n", - "\n", - "For \"Barbie\":\n", - "* At Googleplex 16, the showtimes are 10:00 PM and 11:00 PM.\n", - "* At Android Theatre, the showtimes are 10:00 PM and 11:00 PM.\n", - "\n", - "For \"Oppenheimer\":\n", - "* At Googleplex 16, the showtimes are 10:00 PM and 11:00 PM.\n", - "* At Android Theatre, the showtimes are 10:00 PM and 11:00 PM." - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ + "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", + "Function response: { id=None name='dim_lights' response={'result': True} }\n", + "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'energetic': True, 'loud': True, 'bpm': 120} name='start_music' }\n", + "Function call: { id=None args={'brightness': 0.5} name='dim_lights' }\n", + "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", "--------------------------------------------------------------------------------\n" ] } ], "source": [ - "chat = 
client.chats.create(\n", - " model = MODEL_ID,\n", - " config = {\n", - " \"tools\": theater_functions,\n", + "# Set the chat up with tools.\n", + "party_chat = client.chats.create(\n", + " model=MODEL_ID,\n", + " config={\n", + " \"tools\": house_fns,\n", + " \"tool_config\" : {\n", + " \"function_calling_config\": {\n", + " \"mode\": \"any\"\n", + " }\n", + " }\n", " }\n", ")\n", "\n", - "response = chat.send_message(\n", - " \"Which comedy movies are shown tonight (01/01/2025) in Mountain view, in which cinema and at what time?\"\n", + "# Call the API\n", + "response = party_chat.send_message(\n", + " \"Turn this place into a party!\"\n", ")\n", "\n", - "for content in chat.get_history():\n", - " display(Markdown(\"###\" + content.role + \":\"))\n", - " for part in content.parts:\n", - " if part.text:\n", - " display(Markdown(part.text))\n", - " if part.function_call:\n", - " print(\"Function call: {\", part.function_call, \"}\")\n", - " if part.function_response:\n", - " print(\"Function response: {\", part.function_response, \"}\")\n", - " print(\"-\" * 80)" + "\n", + "print_history(party_chat)" ] }, { "cell_type": "markdown", "metadata": { - "id": "eb364196a719" + "id": "t6iYpty7yZct" }, "source": [ - "Here you can see that the model made seven calls to answer your question and used the outputs of them in the subsequent calls and in the final answer." + "Notice the single model turn contains three FunctionCall parts, which the SDK then executed before getting the final text response." ] }, { "cell_type": "markdown", "metadata": { - "id": "EuwKoNIhGBJN" + "id": "TxXGT3n4AQhk" }, "source": [ - "## Parallel function calls\n", + "## Compositional Function Calling\n", + "The model can chain function calls across multiple turns, using the result from one call to inform the next. This allows for complex, multi-step reasoning and task completion.\n", "\n", - "The Gemini API can call multiple functions in a single turn. 
This caters for scenarios where there are multiple function calls that can take place independently to complete a task.\n", + "**Example: Finding Specific Movie Showtimes**\n", "\n", - "First set the tools up. Unlike the movie example above, these functions do not require input from each other to be called so they should be good candidates for parallel calling." + "Let's reuse the theater_functions and ask a more complex query that requires finding movies first, then potentially theaters, then showtimes." ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 24, "metadata": { - "id": "cJ-mSixWGqLv" + "id": "1jGiexKsAolU", + "outputId": "65b61303-8ce9-4b35-992a-048617455fc1", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + } }, - "outputs": [], + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Here are the showtimes for \"Barbie\" and \"Oppenheimer\" playing in Mountain View, CA on 01/01/2025:\n", + "\n", + "* **Barbie**:\n", + " * At Googleplex 16: 10:00 AM, 11:00 AM\n", + " * At Android Theatre: 10:00 AM, 11:00 AM\n", + "\n", + "* **Oppenheimer**:\n", + " * At Googleplex 16: 10:00 AM, 11:00 AM\n", + " * At Android Theatre: 10:00 AM, 11:00 AM\n", + "\n", + "--- History ---\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "\n Find comedy movies playing in Mountain View, CA on 01/01/2025.\n First, find the movie titles.\n Then, find the theaters showing those movies.\n Finally, find the showtimes for each movie at each theater.\n" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + 
"text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "Okay, I can help with that. First, I will find the comedy movie titles playing in Mountain View, CA. Then, I will find the theaters showing those movies, and finally, I will get the showtimes for each movie at each theater on 01/01/2025.\n\nLet's start by finding the movie titles." + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'location': 'Mountain View, CA', 'description': 'comedy movies'} name='find_movies' }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='find_movies' response={'result': ['Barbie', 'Oppenheimer']} }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "OK. I found the following comedy movies playing in Mountain View, CA: \"Barbie\", \"Oppenheimer\".\n\nNow I will find the theaters showing these movies in Mountain View, CA." 
+ }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'location': 'Mountain View, CA', 'movie': 'Barbie'} name='find_theaters' }\n", + "Function call: { id=None args={'location': 'Mountain View, CA', 'movie': 'Oppenheimer'} name='find_theaters' }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='find_theaters' response={'result': ['Googleplex 16', 'Android Theatre']} }\n", + "Function response: { id=None name='find_theaters' response={'result': ['Googleplex 16', 'Android Theatre']} }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "OK. Both \"Barbie\" and \"Oppenheimer\" are playing at Googleplex 16 and Android Theatre.\n\nNow, I will find the showtimes for \"Barbie\" and \"Oppenheimer\" at both Googleplex 16 and Android Theatre on 01/01/2025." 
+ }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={'theater': 'Googleplex 16', 'date': '01/01/2025', 'movie': 'Barbie', 'location': 'Mountain View, CA'} name='get_showtimes' }\n", + "Function call: { id=None args={'movie': 'Barbie', 'date': '01/01/2025', 'theater': 'Android Theatre', 'location': 'Mountain View, CA'} name='get_showtimes' }\n", + "Function call: { id=None args={'theater': 'Googleplex 16', 'location': 'Mountain View, CA', 'date': '01/01/2025', 'movie': 'Oppenheimer'} name='get_showtimes' }\n", + "Function call: { id=None args={'date': '01/01/2025', 'theater': 'Android Theatre', 'location': 'Mountain View, CA', 'movie': 'Oppenheimer'} name='get_showtimes' }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='get_showtimes' response={'result': ['10:00', '11:00']} }\n", + "Function response: { id=None name='get_showtimes' response={'result': ['10:00', '11:00']} }\n", + "Function response: { id=None name='get_showtimes' response={'result': ['10:00', '11:00']} }\n", + "Function response: { id=None name='get_showtimes' response={'result': ['10:00', '11:00']} }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "Here are the showtimes for \"Barbie\" and \"Oppenheimer\" playing in Mountain View, CA on 01/01/2025:\n\n* **Barbie**:\n * At Googleplex 16: 10:00 AM, 11:00 AM\n * At Android Theatre: 10:00 AM, 11:00 AM\n\n* **Oppenheimer**:\n 
* At Googleplex 16: 10:00 AM, 11:00 AM\n * At Android Theatre: 10:00 AM, 11:00 AM" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--------------------------------------------------------------------------------\n" + ] + } + ], "source": [ - "def power_disco_ball(power: bool) -> bool:\n", - " \"\"\"Powers the spinning disco ball.\"\"\"\n", - " print(f\"Disco ball is {'spinning!' if power else 'stopped.'}\")\n", - " return True\n", - "\n", + "chat = client.chats.create(\n", + " model = MODEL_ID,\n", + " config = {\n", + " \"tools\": theater_functions,\n", + " }\n", + ")\n", "\n", - "def start_music(energetic: bool, loud: bool, bpm: int) -> str:\n", - " \"\"\"Play some music matching the specified parameters.\n", + "response = chat.send_message(\"\"\"\n", + " Find comedy movies playing in Mountain View, CA on 01/01/2025.\n", + " First, find the movie titles.\n", + " Then, find the theaters showing those movies.\n", + " Finally, find the showtimes for each movie at each theater.\n", + "\"\"\"\n", + ")\n", "\n", - " Args:\n", - " energetic: Whether the music is energetic or not.\n", - " loud: Whether the music is loud or not.\n", - " bpm: The beats per minute of the music.\n", + "print(response.text)\n", + "print(\"\\n--- History ---\")\n", + "print_history(chat)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "s7J-nyTN8hL-" + }, + "source": [ + "Here you can see that the model made seven calls to answer your question and used the outputs of them in the subsequent calls and in the final answer." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BsS8_hhNBpLS" + }, + "source": [ + "## Function Calling Configuration using Modes\n", "\n", - " Returns: The name of the song being played.\n", - " \"\"\"\n", - " print(f\"Starting music! 
{energetic=} {loud=}, {bpm=}\")\n", - " return \"Never gonna give you up.\"\n", + "While AUTO mode (or the SDK's default automatic execution) is often sufficient, you can precisely control when and which functions the model is allowed to call using the tool_config parameter during model/chat initialization or in send_message.\n", "\n", + "The `tool_config` accepts a ToolConfig object, which contains a `FunctionCallingConfig`.\n", "\n", - "def dim_lights(brightness: float) -> bool:\n", - " \"\"\"Dim the lights.\n", + "The `FunctionCallingConfig` has two main fields:\n", "\n", - " Args:\n", - " brightness: The brightness of the lights, 0.0 is off, 1.0 is full.\n", - " \"\"\"\n", - " print(f\"Lights are now set to {brightness:.0%}\")\n", - " return True\n", + "- `mode`: Controls the overall function calling behavior (AUTO, ANY, NONE).\n", "\n", - "house_fns = [power_disco_ball, start_music, dim_lights]" + "- `allowed_function_names`: An optional list of function names the model is restricted to calling in this turn." ] }, { "cell_type": "markdown", "metadata": { - "id": "zlrmXN7fxQi0" + "id": "UGZtasE4CObk" }, "source": [ - "Now call the model with an instruction that could use all of the specified tools." + "### AUTO (Default Mode)\n", + "\n", + "- Behavior: The model decides whether to respond with text or to call one or more functions from the provided `tools`. This is the most flexible mode.\n", + "\n", + "- SDK Default: When using ChatSession with automatic execution enabled, the underlying behavior effectively uses `AUTO` mode unless overridden by `tool_config`." 
] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 29, "metadata": { - "id": "21ecYHLgIsCl" + "id": "mggqLU55CZlj", + "outputId": "5d925b8a-e886-49dc-94a7-f085ec5bb826", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 352 + } }, "outputs": [ { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Disco ball is spinning!\n", - "Starting music! energetic=True loud=True, bpm=130\n", - "Lights are now set to 30%\n" + "LIGHTBOT: Lights enabled.\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Turn this place into a party!" - ], "text/plain": [ "" - ] + ], + "text/markdown": "Turn on the lights!" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###model:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={} name='enable_lights' }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", "data": { - "text/markdown": [ - "Okay, I can help with that! 
I'll turn on the disco ball, start some loud, energetic music, and dim the lights.\n", - "\n" - ], "text/plain": [ "" - ] + ], + "text/markdown": "###user:" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function call: { id=None args={'power': True} name='power_disco_ball' }\n", - "Function call: { id=None args={'energetic': True, 'loud': True, 'bpm': 130} name='start_music' }\n", - "Function call: { id=None args={'brightness': 0.3} name='dim_lights' }\n", + "Function response: { id=None name='enable_lights' response={'result': None} }\n", "--------------------------------------------------------------------------------\n" ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "###user:" + "text/plain": [ + "" ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { "text/plain": [ "" - ] + ], + "text/markdown": "OK. 
The lights are on.\n" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ - "Function response: { id=None name='power_disco_ball' response={'result': True} }\n", - "Function response: { id=None name='start_music' response={'result': 'Never gonna give you up.'} }\n", - "Function response: { id=None name='dim_lights' response={'result': True} }\n", "--------------------------------------------------------------------------------\n" ] - }, + } + ], + "source": [ + "chat = client.chats.create(model=MODEL_ID)\n", + "\n", + "response = chat.send_message(\n", + " message=\"Turn on the lights!\",\n", + " config={\n", + " \"system_instruction\": instruction,\n", + " \"tools\": light_controls,\n", + " \"tool_config\" : types.ToolConfig(\n", + " function_calling_config=types.FunctionCallingConfig(\n", + " mode=\"auto\"\n", + " )\n", + " )\n", + " }\n", + ")\n", + "\n", + "print_history(chat)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3s9_3yVfDfTZ" + }, + "source": [ + "### NONE Mode\n", + "Behavior: The model is explicitly prohibited from calling any functions, even if tools are provided. It will only respond with text. Useful for turns where you want a purely conversational response." + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "id": "Yw5Vn3y3DkG6", + "outputId": "1221a1c1-c4d0-4bc6-e479-8aee0d018188", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 184 + } + }, + "outputs": [ { + "output_type": "display_data", "data": { - "text/markdown": [ - "###model:" + "text/plain": [ + "" ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { "text/plain": [ "" - ] + ], + "text/markdown": "Hello light-bot, what can you do?" 
}, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--------------------------------------------------------------------------------\n" + ] }, { + "output_type": "display_data", "data": { - "text/markdown": [ - "Alright, the party is starting! The disco ball is on, the lights are dimmed, and we've got some energetic music playing." + "text/plain": [ + "" ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { "text/plain": [ "" - ] + ], + "text/markdown": "I can turn lights on and off, and I can set the color." }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { - "name": "stdout", "output_type": "stream", + "name": "stdout", "text": [ "--------------------------------------------------------------------------------\n" ] } ], "source": [ - "# Set the chat up with tools.\n", - "chat = client.chats.create(\n", - " model = MODEL_ID,\n", - " config = {\n", - " \"tools\": house_fns,\n", - " }\n", - ")\n", + "none_chat = client.chats.create(model=MODEL_ID)\n", "\n", - "# Call the API\n", - "response = chat.send_message(\n", - " \"Turn this place into a party!\"\n", + "response = none_chat.send_message(\n", + " message=\"Hello light-bot, what can you do?\",\n", + " config={\n", + " \"system_instruction\": instruction,\n", + " \"tools\": light_controls, # Tools are provided\n", + " \"tool_config\" : types.ToolConfig(\n", + " function_calling_config=types.FunctionCallingConfig(\n", + " mode=\"none\"\n", + " )\n", + " ) # but NONE mode prevents their use\n", + " }\n", ")\n", "\n", - "# Print out each of the function calls requested from this single call.\n", - "for content in chat.get_history():\n", - " display(Markdown(\"###\" + content.role + \":\"))\n", - " for part in content.parts:\n", - " if part.text:\n", - " display(Markdown(part.text))\n", - " if part.function_call:\n", - " print(\"Function call: {\", 
part.function_call, \"}\")\n", - " if part.function_response:\n", - " print(\"Function response: {\", part.function_response, \"}\")\n", - " print(\"-\" * 80)" + "print_history(none_chat)" ] }, { "cell_type": "markdown", "metadata": { - "id": "t6iYpty7yZct" + "id": "2nUTj3qoDxU_" }, "source": [ - "As you can see, the model didn't wait for the different function calls and instead called multiple ones in parallel." + "### ANY Mode\n", + "- Behavior: Forces the model to call at least one function.\n", + "\n", + " - If allowed_function_names is set, the model must choose one or more functions from that list.\n", + "\n", + " - If allowed_function_names is not set, the model must choose one or more functions from the full tools list.\n", + "\n", + "- If automatic function calling is enabled, the SDK will call functions automatically until [maximum_remote_calls](https://googleapis.github.io/python-genai/genai.html#genai.types.AutomaticFunctionCallingConfig.maximum_remote_calls) is reached (default: 10).\n", + "- To allow x automatic function calls, set maximum_remote_calls to x + 1. [Read more](https://pypi.org/project/google-genai/#:~:text=Function%20calling%20with%20ANY%20tools%20config%20mode)\n", + "- Use Case: Useful when the application state dictates that the next step must involve a specific action or set of actions." + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "id": "7fmljA2RDw9a", + "outputId": "577fe56a-5835-4da1-b175-c23ec5d07348", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 344 + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "LIGHTBOT: Lights enabled.\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "Make this place PURPLE!" 
+ }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={} name='enable_lights' }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###user:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function response: { id=None name='enable_lights' response={'result': None} }\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/markdown": "###model:" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function call: { id=None args={} name='enable_lights' }\n", + "--------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "chat = client.chats.create(model=MODEL_ID)\n", + "\n", + "response = chat.send_message(\n", + " \"Make this place PURPLE!\",\n", + " config={\n", + " \"system_instruction\": instruction,\n", + " \"tools\": light_controls, # Provide all tools\n", + " \"tool_config\" : {\n", + " \"function_calling_config\": {\n", + " \"mode\": \"any\"\n", + " }\n", + " },\n", + " \"automatic_function_calling\": {\n", + " \"maximum_remote_calls\" : 1\n", + " }\n", + " } # But restrict to available_fns with ANY mode\n", + ")\n", + "\n", + "print_history(chat)" ] }, { @@ -1388,7 +2219,7 @@ "hY2NtS3jV56U" ], "name": "Function_calling.ipynb", - "toc_visible": true + "provenance": [] }, "google": { "image_path": 
"/site-assets/images/share.png", @@ -1408,4 +2239,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} +} \ No newline at end of file diff --git a/quickstarts/Function_calling_config.ipynb b/quickstarts/Function_calling_config.ipynb index 7ab78b18c..e828dad55 100644 --- a/quickstarts/Function_calling_config.ipynb +++ b/quickstarts/Function_calling_config.ipynb @@ -11,7 +11,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "cellView": "form", "id": "tuOe1ymfHZPu" @@ -48,323 +48,7 @@ "id": "1e41a2ce62eb" }, "source": [ - "Specifying a `function_calling_config` allows you to control how the Gemini API acts when `tools` have been specified. For example, you can choose to only allow free-text output (disabling function calling), force it to choose from a subset of the functions provided in `tools`, or let it act automatically.\n", - "\n", - "This guide assumes you are already familiar with function calling. For an introduction, check out the [docs](https://ai.google.dev/docs/function_calling)." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "id": "m4DhA4907Asz" - }, - "outputs": [], - "source": [ - "%pip install -U -q \"google-generativeai>=0.7.2\"" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "aU-mY9hi8pQh" - }, - "source": [ - "To run the following cell, your API key must be stored in a Colab Secret named `GOOGLE_API_KEY`. If you don't already have an API key, or you're not sure how to create a Colab Secret, see the [Authentication](https://github.com/google-gemini/gemini-api-cookbook/blob/main/quickstarts/Authentication.ipynb) quickstart for an example." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "id": "wp3W4Pdf8rBO" - }, - "outputs": [], - "source": [ - "from google.colab import userdata\n", - "import google.generativeai as genai\n", - "\n", - "genai.configure(api_key=userdata.get(\"GOOGLE_API_KEY\"))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "iJqil-VL8ug-" - }, - "source": [ - "## Set up a model with tools\n", - "\n", - "This example uses 3 functions that control a simple hypothetical lighting system. Using these functions requires them to be called in a specific order. For example, you must turn the light system on before you can change color.\n", - "\n", - "While you can pass these directly to the model and let it try to call them correctly, specifying the `function_calling_config` gives you precise control over the functions that are available to the model." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "id": "gLS26n7A9l9B" - }, - "outputs": [], - "source": [ - "model_name = \"gemini-2.0-flash\" # @param [\"gemini-1.5-flash-latest\",\"gemini-2.0-flash-lite\",\"gemini-2.0-flash\",\"gemini-2.5-pro-exp-03-25\"] {\"allow-input\":true}\n", - "\n", - "def enable_lights():\n", - " \"\"\"Turn on the lighting system.\"\"\"\n", - " print(\"LIGHTBOT: Lights enabled.\")\n", - "\n", - "\n", - "def set_light_color(rgb_hex: str):\n", - " \"\"\"Set the light color. Lights must be enabled for this to work.\"\"\"\n", - " print(f\"LIGHTBOT: Lights set to {rgb_hex}.\")\n", - "\n", - "\n", - "def stop_lights():\n", - " \"\"\"Stop flashing lights.\"\"\"\n", - " print(\"LIGHTBOT: Lights turned off.\")\n", - "\n", - "\n", - "light_controls = [enable_lights, set_light_color, stop_lights]\n", - "instruction = \"You are a helpful lighting system bot. You can turn lights on and off, and you can set the color. 
Do not perform any other tasks.\"\n", - "\n", - "model = genai.GenerativeModel(\n", - " model_name, tools=light_controls, system_instruction=instruction\n", - ")\n", - "\n", - "chat = model.start_chat()" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "JqROCznFCj_Y" - }, - "source": [ - "Create a helper function for setting `function_calling_config` on `tool_config`." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "id": "_QgLFPL4Chon" - }, - "outputs": [], - "source": [ - "from google.generativeai.types import content_types\n", - "from collections.abc import Iterable\n", - "\n", - "\n", - "def tool_config_from_mode(mode: str, fns: Iterable[str] = ()):\n", - " \"\"\"Create a tool config with the specified function calling mode.\"\"\"\n", - " return content_types.to_tool_config(\n", - " {\"function_calling_config\": {\"mode\": mode, \"allowed_function_names\": fns}}\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ofMEuh_MFdMf" - }, - "source": [ - "## Text-only mode: `NONE`\n", - "\n", - "If you have provided the model with tools, but do not want to use those tools for the current conversational turn, then specify `NONE` as the mode. `NONE` tells the model not to make any function calls, and will behave as though none have been provided." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "id": "6ZlIFwXqGA09" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello! I can turn lights on and off and set their color. 
Just let me know what you'd like me to do.\n", - "\n" - ] - } - ], - "source": [ - "tool_config = tool_config_from_mode(\"none\")\n", - "\n", - "response = chat.send_message(\n", - " \"Hello light-bot, what can you do?\", tool_config=tool_config\n", - ")\n", - "print(response.text)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "uux063sjHZ_Z" - }, - "source": [ - "## Automatic mode: `AUTO`\n", - "\n", - "To allow the model to decide whether to respond in text or call specific functions, you can specify `AUTO` as the mode." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "id": "vwO9dUjvHoT8" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "function_call {\n", - " name: \"enable_lights\"\n", - " args {\n", - " }\n", - "}\n", - "\n" - ] - } - ], - "source": [ - "tool_config = tool_config_from_mode(\"auto\")\n", - "\n", - "response = chat.send_message(\"Light this place up!\", tool_config=tool_config)\n", - "print(response.parts[0])\n", - "chat.rewind(); # You are not actually calling the function, so remove this from the history." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "oHhaO-P9CBPb" - }, - "source": [ - "## Function-calling mode: `ANY`\n", - "\n", - "Setting the mode to `ANY` will force the model to make a function call. By setting `allowed_function_names`, the model will only choose from those functions. If it is not set, all of the functions in `tools` are candidates for function calling.\n", - "\n", - "In this example system, if the lights are already on, then the user can change color or turn the lights off." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "id": "GQpz94zrCNJF" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "function_call {\n", - " name: \"set_light_color\"\n", - " args {\n", - " fields {\n", - " key: \"rgb_hex\"\n", - " value {\n", - " string_value: \"800080\"\n", - " }\n", - " }\n", - " }\n", - "}\n", - "\n" - ] - } - ], - "source": [ - "available_fns = [\"set_light_color\", \"stop_lights\"]\n", - "\n", - "tool_config = tool_config_from_mode(\"any\", available_fns)\n", - "\n", - "response = chat.send_message(\"Make this place PURPLE!\", tool_config=tool_config)\n", - "print(response.parts[0])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "8cGrRy-uJ7-J" - }, - "source": [ - "## Automatic function calling\n", - "\n", - "`tool_config` works when enabling automatic function calling too." - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "id": "hx7aIX8OXvi6" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "LIGHTBOT: Lights enabled.\n" - ] - }, - { - "data": { - "text/plain": [ - "response:\n", - "GenerateContentResponse(\n", - " done=True,\n", - " iterator=None,\n", - " result=protos.GenerateContentResponse({\n", - " \"candidates\": [\n", - " {\n", - " \"content\": {\n", - " \"parts\": [\n", - " {\n", - " \"text\": \"Okay, I've turned on the lights.\"\n", - " }\n", - " ],\n", - " \"role\": \"model\"\n", - " },\n", - " \"finish_reason\": \"STOP\",\n", - " \"avg_logprobs\": -0.00836455523967743\n", - " }\n", - " ],\n", - " \"usage_metadata\": {\n", - " \"prompt_token_count\": 86,\n", - " \"candidates_token_count\": 10,\n", - " \"total_token_count\": 96\n", - " },\n", - " \"model_version\": \"gemini-2.0-flash\"\n", - " }),\n", - ")" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "available_fns = [\"enable_lights\"]\n", - "tool_config = 
tool_config_from_mode(\"any\", available_fns)\n", - "\n", - "auto_chat = model.start_chat(enable_automatic_function_calling=True)\n", - "auto_chat.send_message(\"It's awful dark in here...\", tool_config=tool_config)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "kz8McBZfXg0N" - }, - "source": [ - "## Further reading\n", - "\n", - "Check out the function calling [quickstart](https://github.com/google-gemini/cookbook/blob/main/quickstarts/Function_calling.ipynb) for an introduction to function calling. You can find another fun function calling example [here](https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Function_calling_REST.ipynb) using curl.\n" + "This notebook has been migrated to the new [`quickstarts/function_calling.ipynb`](./Function_calling.ipynb) notebook highlighting the latest Gemini capabilities and formatting." ] } ],