Basic Usage

Define functions that the model can call:
from openai import OpenAI
import json

# Client pointed at the TrueFoundry LLM gateway (OpenAI-compatible API).
client = OpenAI(
    api_key="your_truefoundry_api_key",
    base_url="<truefoundry-base-url>/api/llm",
)

# Define a function (tool) the model is allowed to call.
# The "parameters" value is a JSON Schema object describing the arguments.
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA",
                },
                "unit": {
                    "type": "string",
                    "enum": ["celsius", "fahrenheit"],
                },
            },
            "required": ["location"],
        },
    },
}]

# Make the request, offering the tool to the model.
response = client.chat.completions.create(
    model="openai-main/gpt-4o-mini",
    messages=[{"role": "user", "content": "What's the weather in New York?"}],
    tools=tools,
)

# Check if the model wants to call a function.
# (Fixed: the original snippet had the `if` body unindented, which is a SyntaxError.)
if response.choices[0].message.tool_calls:
    tool_call = response.choices[0].message.tool_calls[0]
    function_name = tool_call.function.name
    # Arguments arrive as a JSON string; decode them into a dict.
    function_args = json.loads(tool_call.function.arguments)
    print(f"Function called: {function_name}")
    print(f"Arguments: {function_args}")
Function Definition Schema

Basic Structure

When defining functions, you need to provide:
- name: The function name
- description: What the function does
- parameters: A JSON Schema object describing the parameters
# JSON Schema for the function's arguments: one required string parameter.
_weather_parameters = {
    "type": "object",
    "properties": {
        "location": {"type": "string", "description": "City name"},
    },
    "required": ["location"],
}

# A minimal function definition: name, human-readable description,
# and the parameter schema the model must conform to.
function_schema = {
    "name": "get_weather",
    "description": "Get current weather information",
    "parameters": _weather_parameters,
}
Parameter Types

Functions support various parameter types, including strings, integers, numbers, booleans, and arrays:
# Demonstrates every common JSON Schema parameter type in one definition.
function_schema = {
    "name": "process_data",
    "description": "Process data with various parameters",
    "parameters": {
        "type": "object",
        "properties": {
            # string — free-form text
            "text": {"type": "string", "description": "Text to process"},
            # integer — whole numbers only
            "count": {"type": "integer", "description": "Number of items"},
            # number — floating point allowed
            "confidence": {"type": "number", "description": "Threshold (0.0 to 1.0)"},
            # boolean — true/false flag
            "enabled": {"type": "boolean", "description": "Whether processing is enabled"},
            # array — homogeneous list; "items" declares the element type
            "categories": {
                "type": "array",
                "items": {"type": "string"},
                "description": "List of categories",
            },
        },
        # Only "text" is mandatory; everything else is optional.
        "required": ["text"],
    },
}
Complete Workflow

Multiple Functions

Define multiple functions for the model to choose from:
# Two tool definitions; the model picks whichever fits the user's request.
_weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get current weather information",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {"type": "string"},
                "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
            },
            "required": ["location"],
        },
    },
}

_search_tool = {
    "type": "function",
    "function": {
        "name": "search_web",
        "description": "Search the web for information",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string"},
                # "default" documents the fallback; it is advisory in JSON Schema.
                "max_results": {"type": "integer", "default": 5},
            },
            "required": ["query"],
        },
    },
}

tools = [_weather_tool, _search_tool]
Handling Function Calls

Process function calls and continue the conversation:
# Initial request
messages = [{"role": "user", "content": "What's the weather in Tokyo?"}]
response = client.chat.completions.create(
    model="openai-main/gpt-4o-mini",
    messages=messages,
    tools=tools,
)

# Handle function call(s).
# (Fixed: the original snippet had the `if`/`for` bodies unindented — a SyntaxError.)
if response.choices[0].message.tool_calls:
    # Echo the assistant turn (including its tool_calls) into the history
    # so the follow-up request has the full context.
    messages.append(response.choices[0].message)
    for tool_call in response.choices[0].message.tool_calls:
        function_name = tool_call.function.name
        function_args = json.loads(tool_call.function.arguments)
        # Execute your function (simulated here).
        # NOTE(review): if the model calls a tool other than get_weather,
        # `result` would be stale/unbound — dispatch explicitly in real code.
        if function_name == "get_weather":
            result = f"The weather in {function_args['location']} is 22°C and sunny"
        # Add the function result to the conversation; tool_call_id links
        # this result to the specific call the assistant made.
        messages.append({
            "role": "tool",
            "tool_call_id": tool_call.id,
            "content": result,
        })
    # Continue the conversation with the tool results included.
    final_response = client.chat.completions.create(
        model="openai-main/gpt-4o-mini",
        messages=messages,
    )
    print(final_response.choices[0].message.content)
Controlling Function Calls

Control when and how functions are called:
# The `tool_choice` parameter controls whether and which tools the model
# may call. Four variants of the same request follow, differing only there.

# 1. Force one specific function: the model must call get_weather.
response = client.chat.completions.create(
    model="openai-main/gpt-4o-mini",
    messages=[{"role": "user", "content": "What's the weather?"}],
    tools=tools,
    tool_choice={"type": "function", "function": {"name": "get_weather"}},
)

# 2. "auto" (the default): the model decides whether to call a tool.
response = client.chat.completions.create(
    model="openai-main/gpt-4o-mini",
    messages=[{"role": "user", "content": "What's the weather?"}],
    tools=tools,
    tool_choice="auto",
)

# 3. "none": tools are visible to the model but it must answer in text.
response = client.chat.completions.create(
    model="openai-main/gpt-4o-mini",
    messages=[{"role": "user", "content": "What's the weather?"}],
    tools=tools,
    tool_choice="none",
)

# 4. "required": the model must call some tool (its choice which).
response = client.chat.completions.create(
    model="openai-main/gpt-4o-mini",
    messages=[{"role": "user", "content": "What's the weather?"}],
    tools=tools,
    tool_choice="required",
)