OpenAI Function Calling: Enable GPT-4 to search locations, calculate routes, and access spatial intelligence by defining Camino AI functions that the model can call automatically when needed.
Why OpenAI + Camino AI?
- Intelligent Tool Selection: GPT-4 automatically decides when to search locations based on user intent
- Natural Conversations: Users ask in plain English, AI handles the technical API calls
- Structured Outputs: Reliable JSON responses that GPT-4 can reason about
- Cost-Effective: Camino AI costs $0.001 per call vs $0.017 for Google Places
Quick Start: Basic Function
1. Install OpenAI SDK
pip install openai requests
2. Define Camino AI Function Schema
import openai
import requests
import json
import os
openai.api_key = os.getenv("OPENAI_API_KEY")
CAMINO_API_KEY = os.getenv("CAMINO_API_KEY")
# Define the function schema
tools = [
    {
        "type": "function",
        "function": {
            "name": "search_locations",
            "description": "Search for places and locations using natural language. Can find restaurants, hotels, shops, attractions, or any physical location. Supports context-aware queries like 'quiet cafes' or 'pet-friendly hotels'.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Natural language description of what to search for (e.g., 'coffee shops', 'hotels near Times Square', 'family-friendly restaurants')"
                    },
                    "latitude": {
                        "type": "number",
                        "description": "Latitude coordinate for search center. If omitted, AI will intelligently determine location from query context."
                    },
                    "longitude": {
                        "type": "number",
                        "description": "Longitude coordinate for search center. If omitted, AI will intelligently determine location from query context."
                    },
                    "radius": {
                        "type": "integer",
                        "description": "Search radius in meters. Default: 1000. Use larger values (5000+) for cities, smaller (500) for dense areas.",
                        "default": 1000
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of results to return (1-100). Default: 20.",
                        "default": 20
                    }
                },
                "required": ["query"]
            }
        }
    }
]
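When GPT-4 decides to use this tool, it returns its chosen arguments as a JSON string in `tool_call.function.arguments`. The snippet below is a hypothetical example of what that string might look like for the Paris query used later in this guide; the actual values depend on the model and the user's message.

# Hypothetical arguments string GPT-4 might produce for this schema
example_arguments = '{"query": "romantic restaurants with outdoor seating in Paris", "limit": 20}'
parsed = json.loads(example_arguments)
print(parsed["query"])  # -> romantic restaurants with outdoor seating in Paris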
3. Implement Function Handler
def search_locations(query, latitude=None, longitude=None, radius=1000, limit=20):
    """Call Camino AI to search for locations"""
    params = {
        "query": query,
        "radius": radius,
        "limit": limit,
        "rank": True,
        "answer": True
    }
    # Compare against None so that 0.0 coordinates are still sent
    if latitude is not None and longitude is not None:
        params["lat"] = latitude
        params["lon"] = longitude
    headers = {"X-API-Key": CAMINO_API_KEY}
    try:
        response = requests.get(
            "https://api.getcamino.ai/query",
            params=params,
            headers=headers,
            timeout=30
        )
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        return {"error": str(e)}
4. Create Chat with Function Calling
def chat_with_locations(user_message, conversation_history=None):
    if conversation_history is None:
        conversation_history = []

    # Add user message to history
    conversation_history.append({"role": "user", "content": user_message})

    # Initial API call
    response = openai.chat.completions.create(
        model="gpt-4",
        messages=conversation_history,
        tools=tools,
        tool_choice="auto"  # Let GPT decide when to use tools
    )

    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls

    # If GPT wants to call a function
    if tool_calls:
        conversation_history.append(response_message)

        # Execute each tool call
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_args = json.loads(tool_call.function.arguments)

            # Call the actual function
            if function_name == "search_locations":
                function_response = search_locations(**function_args)

                # Add function response to conversation
                conversation_history.append({
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    "content": json.dumps(function_response)
                })

        # Get final response from GPT
        second_response = openai.chat.completions.create(
            model="gpt-4",
            messages=conversation_history
        )
        return second_response.choices[0].message.content

    # No function call needed
    return response_message.content
# Use the chat function
result = chat_with_locations("Find me romantic restaurants in Paris with outdoor seating")
print(result)
How it Works:
- User asks a location question in natural language
- GPT-4 recognizes the need for location data and calls the function
- Camino AI executes the search and returns results
- GPT-4 processes results and responds naturally to the user
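Concretely, after one tool-calling turn the conversation history holds this role sequence. The sketch below is illustrative (the tool-call ID and contents are made up), but the ordering is what the OpenAI API expects:

# Illustrative role sequence after one tool-calling turn (IDs and content are made up)
example_history = [
    {"role": "user", "content": "Find me romantic restaurants in Paris with outdoor seating"},
    # assistant message containing tool_calls, appended as the SDK message object
    {"role": "tool", "tool_call_id": "call_abc123", "name": "search_locations",
     "content": '{"results": [...]}'},
    {"role": "assistant", "content": "Here are some romantic options with terraces..."},
]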
Advanced: Multiple Functions
Add Route Calculation
# Add to tools array
route_tool = {
    "type": "function",
    "function": {
        "name": "calculate_route",
        "description": "Calculate route and get turn-by-turn directions between two locations. Returns distance, duration, and detailed driving/walking/cycling directions.",
        "parameters": {
            "type": "object",
            "properties": {
                "start_latitude": {
                    "type": "number",
                    "description": "Starting point latitude"
                },
                "start_longitude": {
                    "type": "number",
                    "description": "Starting point longitude"
                },
                "end_latitude": {
                    "type": "number",
                    "description": "Destination latitude"
                },
                "end_longitude": {
                    "type": "number",
                    "description": "Destination longitude"
                },
                "mode": {
                    "type": "string",
                    "enum": ["car", "bike", "foot"],
                    "description": "Transportation mode",
                    "default": "car"
                }
            },
            "required": ["start_latitude", "start_longitude", "end_latitude", "end_longitude"]
        }
    }
}
tools.append(route_tool)
# Implement handler
def calculate_route(start_latitude, start_longitude, end_latitude, end_longitude, mode="car"):
    params = {
        "start_lat": start_latitude,
        "start_lon": start_longitude,
        "end_lat": end_latitude,
        "end_lon": end_longitude,
        "mode": mode
    }
    headers = {"X-API-Key": CAMINO_API_KEY}
    response = requests.get(
        "https://api.getcamino.ai/route",
        params=params,
        headers=headers
    )
    return response.json()
Add Location Context
context_tool = {
    "type": "function",
    "function": {
        "name": "get_location_context",
        "description": "Get AI-generated contextual information about a specific area, including neighborhood character, nearby amenities, and suitability for different purposes (e.g., families, tourists, business).",
        "parameters": {
            "type": "object",
            "properties": {
                "latitude": {"type": "number"},
                "longitude": {"type": "number"},
                "radius": {
                    "type": "integer",
                    "default": 500,
                    "description": "Analysis radius in meters"
                },
                "context_query": {
                    "type": "string",
                    "description": "Specific question about the area (e.g., 'Is this good for families?', 'What's the nightlife like?')"
                }
            },
            "required": ["latitude", "longitude"]
        }
    }
}
tools.append(context_tool)
def get_location_context(latitude, longitude, radius=500, context_query=""):
    data = {
        "location": {"lat": latitude, "lon": longitude},
        "radius": radius,
        "context": context_query
    }
    headers = {"X-API-Key": CAMINO_API_KEY}
    response = requests.post(
        "https://api.getcamino.ai/context",
        json=data,
        headers=headers
    )
    return response.json()
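With three tools registered, a dispatch table keeps the chat loop from growing an if/elif branch per function. The helper below is a sketch (not part of either SDK); it simply maps each schema name to the handler defined above and returns a message ready to append to the conversation:

# Map tool names from the schemas to the Python handlers defined above
FUNCTION_HANDLERS = {
    "search_locations": search_locations,
    "calculate_route": calculate_route,
    "get_location_context": get_location_context,
}

def execute_tool_call(tool_call):
    """Run one tool call and return a 'tool' message for the conversation."""
    handler = FUNCTION_HANDLERS.get(tool_call.function.name)
    arguments = json.loads(tool_call.function.arguments)
    result = handler(**arguments) if handler else {"error": f"Unknown tool: {tool_call.function.name}"}
    return {
        "tool_call_id": tool_call.id,
        "role": "tool",
        "name": tool_call.function.name,
        "content": json.dumps(result),
    }

Inside the tool-call loop, the per-function if blocks can then be replaced with a single `conversation_history.append(execute_tool_call(tool_call))`.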
Complete Example: Travel Planning Assistant
import openai
import requests
import json
import os
class TravelAssistant:
    def __init__(self, openai_key, camino_key):
        openai.api_key = openai_key
        self.camino_key = camino_key
        self.conversation = [{
            "role": "system",
            "content": "You are a helpful travel assistant with access to real-time location data. Help users plan trips, find places, and get directions. Always be specific and provide actionable recommendations."
        }]
        self.tools = [
            {
                "type": "function",
                "function": {
                    "name": "search_locations",
                    "description": "Search for places using natural language",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "query": {"type": "string"},
                            "latitude": {"type": "number"},
                            "longitude": {"type": "number"},
                            "radius": {"type": "integer", "default": 1000}
                        },
                        "required": ["query"]
                    }
                }
            },
            # Add route and context tools as shown above
        ]

    def search_locations(self, query, latitude=None, longitude=None, radius=1000):
        params = {"query": query, "radius": radius, "rank": True, "answer": True}
        # Compare against None so that 0.0 coordinates are still sent
        if latitude is not None and longitude is not None:
            params.update({"lat": latitude, "lon": longitude})
        response = requests.get(
            "https://api.getcamino.ai/query",
            params=params,
            headers={"X-API-Key": self.camino_key}
        )
        return response.json()

    def chat(self, message):
        self.conversation.append({"role": "user", "content": message})

        response = openai.chat.completions.create(
            model="gpt-4",
            messages=self.conversation,
            tools=self.tools,
            tool_choice="auto"
        )
        response_message = response.choices[0].message

        if response_message.tool_calls:
            self.conversation.append(response_message)

            for tool_call in response_message.tool_calls:
                function_name = tool_call.function.name
                arguments = json.loads(tool_call.function.arguments)

                # Execute function
                if function_name == "search_locations":
                    result = self.search_locations(**arguments)
                    self.conversation.append({
                        "tool_call_id": tool_call.id,
                        "role": "tool",
                        "name": function_name,
                        "content": json.dumps(result)
                    })

            # Get final response
            final_response = openai.chat.completions.create(
                model="gpt-4",
                messages=self.conversation
            )
            assistant_message = final_response.choices[0].message.content
        else:
            assistant_message = response_message.content

        self.conversation.append({"role": "assistant", "content": assistant_message})
        return assistant_message
# Use the assistant
assistant = TravelAssistant(
    openai_key=os.getenv("OPENAI_API_KEY"),
    camino_key=os.getenv("CAMINO_API_KEY")
)
print(assistant.chat("I'm in Tokyo. Find me authentic ramen shops within walking distance"))
print(assistant.chat("Which one is closest to Shibuya station?"))
print(assistant.chat("Get me directions to the closest one"))
Best Practices
- Clear Descriptions: Write detailed function descriptions so GPT knows exactly when to use each tool
- Optional Parameters: Make latitude/longitude optional - Camino AI can infer location from query
- Error Handling: Always catch exceptions and return user-friendly error messages (a wrapper sketch follows this list)
- Conversation Context: Maintain conversation history for follow-up questions
- Result Formatting: Let GPT-4 process and summarize results naturally
- Tool Choice: Use `tool_choice="auto"` to let GPT decide when tools are needed
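For the error-handling practice above, one approach is to wrap every handler so that failures reach GPT-4 as a JSON error it can explain, rather than an exception that crashes the chat loop. A minimal sketch (the wrapper is illustrative, not part of either SDK):

def safe_tool_call(handler, **arguments):
    """Run a tool handler and always return a JSON-serializable result."""
    try:
        return handler(**arguments)
    except requests.RequestException as e:
        # Network or API failure: give GPT-4 something it can relay gracefully
        return {"error": f"Location service unavailable: {e}"}
    except TypeError as e:
        # GPT-4 occasionally supplies unexpected argument names
        return {"error": f"Invalid arguments: {e}"}

# Usage inside the tool-call loop:
# function_response = safe_tool_call(search_locations, **function_args)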
Common Use Cases
Hotel Finder Chatbot
user: "Find me a budget hotel near JFK airport"
# GPT calls search_locations("budget hotels near JFK airport", latitude=40.6413, longitude=-73.7781)
assistant: "I found several budget-friendly hotels near JFK..."
Restaurant Recommendations
user: "Where should I eat dinner tonight? I'm near Central Park and love Italian food"
# GPT calls search_locations("Italian restaurants", latitude=40.7829, longitude=-73.9654)
assistant: "Based on your location, here are the top Italian restaurants..."
Trip Planning
user: "Plan a walking tour of San Francisco's main attractions"
# GPT calls search_locations("tourist attractions in San Francisco")
# Then calls calculate_route multiple times to connect locations
assistant: "Here's a walking tour itinerary..."
Streaming Responses
def chat_with_streaming(message):
    conversation.append({"role": "user", "content": message})

    stream = openai.chat.completions.create(
        model="gpt-4",
        messages=conversation,
        tools=tools,
        stream=True
    )

    for chunk in stream:
        if chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="")

        # Handle tool calls in stream
        if chunk.choices[0].delta.tool_calls:
            # Process tool calls as they arrive
            pass
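The `pass` above glosses over the hard part: when streaming, each tool call arrives as a series of deltas, so the arguments string has to be stitched back together before it can be parsed. A sketch of that accumulation, written as a drop-in replacement for the loop body above:

# Replacement loop body: accumulate streamed tool-call deltas, keyed by index
tool_call_parts = {}
for chunk in stream:
    delta = chunk.choices[0].delta
    if delta.content:
        print(delta.content, end="")
    if delta.tool_calls:
        for tc in delta.tool_calls:
            part = tool_call_parts.setdefault(tc.index, {"id": None, "name": "", "arguments": ""})
            if tc.id:
                part["id"] = tc.id
            if tc.function and tc.function.name:
                part["name"] = tc.function.name
            if tc.function and tc.function.arguments:
                part["arguments"] += tc.function.arguments

# After the stream ends, each accumulated call can be parsed and executed as before:
# args = json.loads(tool_call_parts[0]["arguments"])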
Pricing
- Camino AI per call: $0.001
- Free tier: 1,000 calls/month
- GPT-4 function call: standard token pricing
- Combined cost: ~$0.002-0.005 per interaction
Start Building Today
Give your AI agents location intelligence
1,000 free API calls every month • No credit card required