Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged examples/function_calling_config.ipynb into examples/function_calling.ipynb notebook #631

Open
wants to merge 26 commits into
base: main
Choose a base branch
from
Open
Changes from 2 commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
204 changes: 118 additions & 86 deletions quickstarts/Function_calling_config.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"id": "Tce3stUlHN0L"
},
"source": [
"##### Copyright 2024 Google LLC."
"##### Copyright 2025 Google LLC."
]
},
{
Expand Down Expand Up @@ -61,7 +61,7 @@
},
"outputs": [],
"source": [
"%pip install -U -q \"google-generativeai>=0.7.2\""
"%pip install -U -q \"google-genai>=1.0.0\""
]
},
{
Expand All @@ -82,9 +82,9 @@
"outputs": [],
"source": [
"from google.colab import userdata\n",
"import google.generativeai as genai\n",
"from google import genai\n",
"\n",
"genai.configure(api_key=userdata.get(\"GOOGLE_API_KEY\"))"
"client = genai.Client(api_key=userdata.get(\"GOOGLE_API_KEY\"))"
]
},
{
Expand All @@ -108,7 +108,7 @@
},
"outputs": [],
"source": [
"model_name = \"gemini-2.0-flash\" # @param [\"gemini-1.5-flash-latest\",\"gemini-2.0-flash-lite\",\"gemini-2.0-flash\",\"gemini-2.5-pro-exp-03-25\"] {\"allow-input\":true}\n",
"MODEL_ID = \"gemini-2.0-flash\" # @param [\"gemini-1.5-flash-latest\",\"gemini-2.0-flash-lite\",\"gemini-2.0-flash\",\"gemini-2.5-pro-exp-03-25\"] {\"allow-input\":true}\n",
"\n",
"def enable_lights():\n",
" \"\"\"Turn on the lighting system.\"\"\"\n",
Expand All @@ -128,11 +128,9 @@
"light_controls = [enable_lights, set_light_color, stop_lights]\n",
"instruction = \"You are a helpful lighting system bot. You can turn lights on and off, and you can set the color. Do not perform any other tasks.\"\n",
"\n",
"model = genai.GenerativeModel(\n",
" model_name, tools=light_controls, system_instruction=instruction\n",
")\n",
"\n",
"chat = model.start_chat()"
"chat = client.chats.create(\n",
" model = MODEL_ID,\n",
")\n"
]
},
{
Expand All @@ -152,17 +150,53 @@
},
"outputs": [],
"source": [
"from google.generativeai.types import content_types\n",
"from google.genai import types\n",
"from collections.abc import Iterable\n",
"\n",
"\n",
"def tool_config_from_mode(mode: str, fns: Iterable[str] = ()):\n",
" \"\"\"Create a tool config with the specified function calling mode.\"\"\"\n",
" return content_types.to_tool_config(\n",
" {\"function_calling_config\": {\"mode\": mode, \"allowed_function_names\": fns}}\n",
" )"
" tool_config = types.ToolConfig(\n",
" function_calling_config = {\n",
" \"mode\": mode,\n",
" \"allowed_function_names\": fns\n",
" }\n",
" )\n",
""
]
},
{
"cell_type": "markdown",
"source": [
"Define a helper function to display the turn-by-turn chat history between the user and the model, including any function calls and function responses."
],
"metadata": {
"id": "v7hOEeTbZxab"
}
},
{
"cell_type": "code",
"source": [
"from IPython.display import Markdown, display\n",
"\n",
"def print_history():\n",
" for content in chat.get_history():\n",
" display(Markdown(\"###\" + content.role + \":\"))\n",
" for part in content.parts:\n",
" if part.text:\n",
" display(Markdown(part.text))\n",
" if part.function_call:\n",
" print(\"Function call: {\", part.function_call, \"}\")\n",
" if part.function_response:\n",
" print(\"Function response: {\", part.function_response, \"}\")\n",
" print(\"-\" * 80)"
],
"metadata": {
"id": "2x9iSru0ZwYZ"
},
"execution_count": 6,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
Expand All @@ -176,25 +210,33 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 7,
"metadata": {
"id": "6ZlIFwXqGA09"
"id": "6ZlIFwXqGA09",
"outputId": "7ecdf105-607f-47e8-eaa2-b30e1d6b27a6",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"name": "stdout",
"text": [
"Hello! I can turn lights on and off and set their color. Just let me know what you'd like me to do.\n",
"\n"
"I can turn lights on and off, and I can set the color.\n"
]
}
],
"source": [
"tool_config = tool_config_from_mode(\"none\")\n",
"tool_config = tool_config_from_mode(\"NONE\")\n",
"\n",
"response = chat.send_message(\n",
" \"Hello light-bot, what can you do?\", tool_config=tool_config\n",
" message=\"Hello light-bot, what can you do?\",\n",
" config={\n",
" \"system_instruction\": instruction,\n",
" \"tools\": light_controls,\n",
" \"tool_config\" : tool_config\n",
" }\n",
")\n",
"print(response.text)"
]
Expand All @@ -212,30 +254,34 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 8,
"metadata": {
"id": "vwO9dUjvHoT8"
"id": "vwO9dUjvHoT8",
"outputId": "dc99e3b5-9139-4476-b1a4-3bc2ee0b904f",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"name": "stdout",
"text": [
"function_call {\n",
" name: \"enable_lights\"\n",
" args {\n",
" }\n",
"}\n",
"\n"
"LIGHTBOT: Lights enabled.\n"
]
}
],
"source": [
"tool_config = tool_config_from_mode(\"auto\")\n",
"\n",
"response = chat.send_message(\"Light this place up!\", tool_config=tool_config)\n",
"print(response.parts[0])\n",
"chat.rewind(); # You are not actually calling the function, so remove this from the history."
"response = chat.send_message(\n",
" message = \"Light this place up!\",\n",
" config={\n",
" \"system_instruction\": instruction,\n",
" \"tools\": light_controls,\n",
" \"tool_config\" : tool_config\n",
" }\n",
")\n"
]
},
{
Expand All @@ -253,27 +299,20 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 9,
"metadata": {
"id": "GQpz94zrCNJF"
"id": "GQpz94zrCNJF",
"outputId": "6050db3e-3867-4bcc-9621-a89846649adb",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"name": "stdout",
"text": [
"function_call {\n",
" name: \"set_light_color\"\n",
" args {\n",
" fields {\n",
" key: \"rgb_hex\"\n",
" value {\n",
" string_value: \"800080\"\n",
" }\n",
" }\n",
" }\n",
"}\n",
"\n"
"LIGHTBOT: Lights set to #800080.\n"
]
}
],
Expand All @@ -282,8 +321,13 @@
"\n",
"tool_config = tool_config_from_mode(\"any\", available_fns)\n",
"\n",
"response = chat.send_message(\"Make this place PURPLE!\", tool_config=tool_config)\n",
"print(response.parts[0])"
"response = chat.send_message(\n",
" \"Make this place PURPLE!\",\n",
" config={\n",
" \"system_instruction\": instruction,\n",
" \"tools\": light_controls,\n",
" \"tool_config\" : tool_config\n",
" })\n"
]
},
{
Expand All @@ -299,61 +343,48 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 10,
"metadata": {
"id": "hx7aIX8OXvi6"
"id": "hx7aIX8OXvi6",
"outputId": "96649781-1bd4-44c4-cc02-acf844bc4793",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"name": "stdout",
"text": [
"LIGHTBOT: Lights enabled.\n"
]
},
{
"output_type": "execute_result",
"data": {
"text/plain": [
"response:\n",
"GenerateContentResponse(\n",
" done=True,\n",
" iterator=None,\n",
" result=protos.GenerateContentResponse({\n",
" \"candidates\": [\n",
" {\n",
" \"content\": {\n",
" \"parts\": [\n",
" {\n",
" \"text\": \"Okay, I've turned on the lights.\"\n",
" }\n",
" ],\n",
" \"role\": \"model\"\n",
" },\n",
" \"finish_reason\": \"STOP\",\n",
" \"avg_logprobs\": -0.00836455523967743\n",
" }\n",
" ],\n",
" \"usage_metadata\": {\n",
" \"prompt_token_count\": 86,\n",
" \"candidates_token_count\": 10,\n",
" \"total_token_count\": 96\n",
" },\n",
" \"model_version\": \"gemini-2.0-flash\"\n",
" }),\n",
")"
"GenerateContentResponse(candidates=[Candidate(content=Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=None, inline_data=None, text=\"Okay, I've turned on the lights.\")], role='model'), citation_metadata=None, finish_message=None, token_count=None, finish_reason=<FinishReason.STOP: 'STOP'>, avg_logprobs=-0.009940502792596817, grounding_metadata=None, index=None, logprobs_result=None, safety_ratings=None)], create_time=None, response_id=None, model_version='gemini-2.0-flash', prompt_feedback=None, usage_metadata=GenerateContentResponseUsageMetadata(cache_tokens_details=None, cached_content_token_count=None, candidates_token_count=10, candidates_tokens_details=[ModalityTokenCount(modality=<MediaModality.TEXT: 'TEXT'>, token_count=10)], prompt_token_count=86, prompt_tokens_details=[ModalityTokenCount(modality=<MediaModality.TEXT: 'TEXT'>, token_count=86)], thoughts_token_count=None, tool_use_prompt_token_count=None, tool_use_prompt_tokens_details=None, total_token_count=96), automatic_function_calling_history=[UserContent(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=None, inline_data=None, text=\"It's awful dark in here...\")], role='user'), Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=FunctionCall(id=None, args={}, name='enable_lights'), function_response=None, inline_data=None, text=None)], role='model'), Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='enable_lights', response={'result': None}), inline_data=None, text=None)], role='user')], parsed=None)"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
"execution_count": 10
}
],
"source": [
"available_fns = [\"enable_lights\"]\n",
"tool_config = tool_config_from_mode(\"any\", available_fns)\n",
"\n",
"auto_chat = model.start_chat(enable_automatic_function_calling=True)\n",
"auto_chat.send_message(\"It's awful dark in here...\", tool_config=tool_config)"
"auto_chat = client.chats.create(\n",
" model=MODEL_ID\n",
")\n",
"\n",
"auto_chat.send_message(\n",
" message = \"It's awful dark in here...\",\n",
" config={\n",
" \"system_instruction\": instruction,\n",
" \"tools\": light_controls,\n",
" \"tool_config\" : tool_config\n",
" })\n"
]
},
{
Expand All @@ -371,7 +402,8 @@
"metadata": {
"colab": {
"name": "Function_calling_config.ipynb",
"toc_visible": true
"toc_visible": true,
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
Expand All @@ -380,4 +412,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}