LangChain Integration - Location Intelligence Tools

Integrate Camino AI with LangChain to add location intelligence to your AI agents. Use custom tools and function calling for natural language location queries, route planning, and spatial reasoning.

LangChain Integration: Add location intelligence to LangChain agents using Camino AI's REST API. Create custom tools for location search, routing, and spatial analysis that your agents can call automatically.

Why LangChain + Camino AI?

  • Natural Language Location Queries: Let agents search locations using plain English ("find pet-friendly hotels near downtown")
  • Spatial Reasoning: Calculate routes, distances, and travel times between locations
  • Multi-Step Planning: Combine location queries with other LangChain tools for complex workflows
  • Cost-Effective: 17x cheaper than Google Places API at $0.001 per call

Quick Start: Custom LangChain Tool

1. Install Dependencies

pip install langchain langchain-openai langgraph requests
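
The examples in this guide read the API key from the CAMINO_API_KEY environment variable. A quick check before running anything (assumes you already have a Camino AI key):

import os

# All snippets below assume CAMINO_API_KEY is set in your environment
if not os.getenv("CAMINO_API_KEY"):
    raise RuntimeError("Set the CAMINO_API_KEY environment variable before running these examples.")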

2. Create Camino AI Tool

from langchain.tools import Tool
import requests
import os

CAMINO_API_KEY = os.getenv("CAMINO_API_KEY")
CAMINO_API_URL = "https://api.getcamino.ai/query"

def search_locations(query: str, latitude: float = None, longitude: float = None, radius: int = 1000) -> str:
    """
    Search for locations using natural language.
    
    Args:
        query: Natural language location query (e.g., "coffee shops", "hotels near Times Square")
        latitude: Center latitude for the search (optional; the API infers a location from the query if omitted)
        longitude: Center longitude for the search (optional; the API infers a location from the query if omitted)
        radius: Search radius in meters (default: 1000)
        
    Returns:
        JSON string with location results including names, addresses, coordinates, and AI summary
    """
    params = {
        "query": query,
        "radius": radius,
        "rank": True,
        "answer": True,
        "limit": 10
    }
    
    if latitude is not None and longitude is not None:
        params["lat"] = latitude
        params["lon"] = longitude
    
    headers = {"X-API-Key": CAMINO_API_KEY}
    
    try:
        response = requests.get(CAMINO_API_URL, params=params, headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json()
        
        # Format results for LLM consumption
        results_summary = f"Found {len(data.get('results', []))} locations.\n\n"
        
        if 'answer' in data:
            results_summary += f"Summary: {data['answer']}\n\n"
        
        results_summary += "Top results:\n"
        for idx, place in enumerate(data.get('results', [])[:5], 1):
            results_summary += f"{idx}. {place.get('name', 'Unknown')} - {place.get('address', 'No address')} ({place.get('distance_km', 'N/A')}km away)\n"
        
        return results_summary
        
    except Exception as e:
        return f"Error searching locations: {str(e)}"

camino_location_tool = Tool(
    name="search_locations",
    func=search_locations,
    description="Search for places and locations using natural language. Useful for finding restaurants, hotels, shops, attractions, or any physical location. Can find places near specific coordinates or intelligently understand location context from the query."
)
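
Before wiring the tool into an agent, you can call the underlying function directly to verify your API key and see the formatted output (the coordinates below are example values near Times Square):

# Quick sanity check of the raw function
print(search_locations("coffee shops", latitude=40.7580, longitude=-73.9855, radius=800))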

3. Use with LangChain Agent

from langchain.agents import initialize_agent, AgentType
from langchain_openai import ChatOpenAI

# Initialize LLM
llm = ChatOpenAI(model="gpt-4", temperature=0)

# Create agent with Camino AI tool
agent = initialize_agent(
    tools=[camino_location_tool],
    llm=llm,
    agent=AgentType.OPENAI_FUNCTIONS,
    verbose=True
)

# Query the agent
result = agent.run("Find me 3 highly-rated coffee shops near the Empire State Building in New York")
print(result)

Key Features:
  1. Automatic tool selection - LangChain decides when to use Camino AI
  2. Natural language input - agents understand location intent
  3. Structured output - formatted results ready for further processing
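
Note that initialize_agent and agent.run still work but are marked deprecated in recent LangChain releases. On a newer LangChain/LangGraph stack, roughly the same agent can be built with LangGraph's prebuilt ReAct agent (a sketch, assuming a recent langgraph version):

from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI

# Tool-calling agent built on LangGraph; the tool definition is unchanged
agent = create_react_agent(ChatOpenAI(model="gpt-4", temperature=0), [camino_location_tool])

result = agent.invoke(
    {"messages": [("user", "Find me 3 highly-rated coffee shops near the Empire State Building")]}
)
print(result["messages"][-1].content)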

Advanced: Multi-Tool Integration

Add Route Planning Tool

def calculate_route(start_lat: float, start_lon: float, end_lat: float, end_lon: float, mode: str = "foot") -> str:
    """
    Calculate route between two locations.
    
    Args:
        start_lat: Starting latitude
        start_lon: Starting longitude
        end_lat: Destination latitude
        end_lon: Destination longitude
        mode: Transport mode ('foot', 'car', 'bike')
        
    Returns:
        Route information with distance, duration, and turn-by-turn directions
    """
    params = {
        "start_lat": start_lat,
        "start_lon": start_lon,
        "end_lat": end_lat,
        "end_lon": end_lon,
        "mode": mode
    }
    
    headers = {"X-API-Key": CAMINO_API_KEY}
    
    try:
        response = requests.get("https://api.getcamino.ai/route", params=params, headers=headers)
        response.raise_for_status()
        data = response.json()
        
        summary = f"Route ({mode}):\n"
        summary += f"Distance: {data.get('distance_km', 'N/A')}km ({data.get('distance_miles', 'N/A')} miles)\n"
        summary += f"Duration: {data.get('duration_minutes', 'N/A')} minutes\n\n"
        
        if 'directions' in data:
            summary += "Directions:\n"
            for idx, step in enumerate(data['directions'][:10], 1):
                summary += f"{idx}. {step.get('instruction', '')} - {step.get('distance', '')}\n"
        
        return summary
        
    except Exception as e:
        return f"Error calculating route: {str(e)}"

from langchain.tools import StructuredTool

# Use StructuredTool so the agent can pass multiple named arguments (coordinates and mode);
# a plain Tool only accepts a single string input
camino_route_tool = StructuredTool.from_function(
    func=calculate_route,
    name="calculate_route",
    description="Calculate a route and get turn-by-turn directions between two geographic coordinates. Supports walking (foot), driving (car), and cycling (bike) modes. Returns distance, duration, and detailed directions."
)

# Add both tools to agent
agent = initialize_agent(
    tools=[camino_location_tool, camino_route_tool],
    llm=llm,
    agent=AgentType.OPENAI_FUNCTIONS,
    verbose=True
)
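
With both tools registered, a single prompt can trigger a search followed by a route calculation:

result = agent.run(
    "Find a bakery and a bookstore near Union Square, San Francisco, "
    "then give me walking directions between them."
)
print(result)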

Use Cases

Travel Planning Agent

# Agent can combine location search with route planning
query = """
I'm visiting San Francisco for a day. Find me:
1. A good breakfast spot near Union Square
2. Popular tourist attractions within 2 miles
3. Calculate walking routes between them
"""

result = agent.run(query)
# Agent automatically uses search_locations and calculate_route as needed

Real Estate Assistant

query = """
I'm looking at a property at 123 Main St, Seattle (47.6062, -122.3321).
Find nearby:
- Elementary schools within 2km
- Grocery stores within 1km
- Coffee shops within 500m
"""

result = agent.run(query)

Location-Aware Customer Service

query = """
Customer needs to find the nearest service center to their location (40.7589, -73.9851).
Find our service centers within 5 miles and tell them which is closest.
"""

result = agent.run(query)

Using LangChain Expression Language (LCEL)

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# Create a chain for location-based recommendations
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful travel assistant. Use the following location data to answer:\n\n{context}"),
    ("user", "{question}")
])

# Fetch location results to inject as context (hard-coded here to example coordinates in Midtown Manhattan)
def get_nearby_places(query):
    return search_locations(query, latitude=40.7589, longitude=-73.9851)

chain = (
    {"question": RunnablePassthrough(), "context": get_nearby_places}
    | prompt
    | llm
    | StrOutputParser()
)

result = chain.invoke("What are some good Italian restaurants nearby?")
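
The coordinates above are fixed; to let the caller supply them per request, the same chain can pull them from the invoke() payload (a sketch relying on standard LCEL coercion of dicts and lambdas):

from operator import itemgetter

# Same chain, but latitude/longitude come from the input dictionary
dynamic_chain = (
    {
        "question": itemgetter("question"),
        "context": lambda x: search_locations(x["question"], latitude=x["lat"], longitude=x["lon"]),
    }
    | prompt
    | llm
    | StrOutputParser()
)

result = dynamic_chain.invoke({
    "question": "What are some good Italian restaurants nearby?",
    "lat": 40.7589,
    "lon": -73.9851,
})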

LangGraph Integration

Use a StateGraph with a shared state schema so the coordinates supplied at invoke() stay available to the routing step:

from typing import TypedDict
from langgraph.graph import StateGraph, END

# Shared state that persists across nodes
class TripState(TypedDict, total=False):
    query: str
    start_coords: tuple
    end_coords: tuple
    results: str
    route: str

# Define workflow nodes (each returns a partial state update)
def search_node(state: TripState):
    results = search_locations(state["query"])
    return {"results": results}

def route_node(state: TripState):
    start = state["start_coords"]
    end = state["end_coords"]
    route = calculate_route(start[0], start[1], end[0], end[1])
    return {"route": route}

# Build graph
workflow = StateGraph(TripState)
workflow.add_node("search", search_node)
workflow.add_node("route", route_node)
workflow.set_entry_point("search")
workflow.add_edge("search", "route")
workflow.add_edge("route", END)

app = workflow.compile()

# Run workflow
result = app.invoke({
    "query": "museums in Paris",
    "start_coords": (48.8566, 2.3522),  # central Paris
    "end_coords": (48.8606, 2.3376)     # near the Louvre
})
print(result["route"])

Best Practices

  • Tool Descriptions: Write clear, detailed descriptions so the LLM knows when to use each tool
  • Error Handling: Always catch exceptions and return user-friendly error messages
  • Context Formatting: Format results for LLM consumption (summaries, top N items)
  • Parameter Validation: Validate coordinates and query inputs before API calls
  • Caching and Rate Limits: Cache frequently requested locations to reduce API calls and stay within rate limits (see the sketch after this list)
  • Combine Tools: Use multiple Camino AI endpoints (search, route, context) for richer workflows
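
A minimal sketch of the validation and caching points above, wrapping the search_locations function defined earlier (the TTL and coordinate rounding are arbitrary choices):

import time

_cache = {}
CACHE_TTL_SECONDS = 300  # arbitrary: re-fetch after 5 minutes

def cached_search(query: str, latitude: float = None, longitude: float = None, radius: int = 1000) -> str:
    # Basic input validation before spending an API call
    if latitude is not None and not -90 <= latitude <= 90:
        return "Error: latitude must be between -90 and 90."
    if longitude is not None and not -180 <= longitude <= 180:
        return "Error: longitude must be between -180 and 180."

    # Round coordinates so nearby requests share a cache entry
    key = (
        query.strip().lower(),
        round(latitude, 3) if latitude is not None else None,
        round(longitude, 3) if longitude is not None else None,
        radius,
    )

    hit = _cache.get(key)
    if hit and time.time() - hit[0] < CACHE_TTL_SECONDS:
        return hit[1]

    result = search_locations(query, latitude, longitude, radius)
    _cache[key] = (time.time(), result)
    return result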

Example: Complete Travel Agent

from langchain.agents import initialize_agent, AgentType
from langchain.tools import Tool
from langchain_openai import ChatOpenAI
import requests
import os

class CaminoAITools:
    def __init__(self, api_key):
        self.api_key = api_key
        self.base_url = "https://api.getcamino.ai"
        self.headers = {"X-API-Key": api_key}
    
    def search(self, query, lat=None, lon=None, radius=1000):
        params = {"query": query, "radius": radius, "rank": True, "answer": True}
        if lat is not None and lon is not None:
            params.update({"lat": lat, "lon": lon})
        
        response = requests.get(f"{self.base_url}/query", params=params, headers=self.headers)
        return response.json()
    
    def route(self, start_lat, start_lon, end_lat, end_lon, mode="foot"):
        params = {
            "start_lat": start_lat, "start_lon": start_lon,
            "end_lat": end_lat, "end_lon": end_lon, "mode": mode
        }
        response = requests.get(f"{self.base_url}/route", params=params, headers=self.headers)
        return response.json()
    
    def context(self, lat, lon, radius=500, context_query=""):
        data = {
            "location": {"lat": lat, "lon": lon},
            "radius": radius,
            "context": context_query
        }
        response = requests.post(f"{self.base_url}/context", json=data, headers=self.headers, timeout=10)
        return response.json()

# Initialize tools
camino = CaminoAITools(os.getenv("CAMINO_API_KEY"))

tools = [
    Tool(
        name="search_locations",
        func=lambda q: str(camino.search(q)),
        description="Search for places using natural language"
    ),
    Tool(
        name="get_location_context",
        # Tool passes a single string, so accept "lat,lon" and split it
        func=lambda coords: str(camino.context(*map(float, coords.split(",")))),
        description="Get contextual information about a location. Input must be 'lat,lon', e.g. '48.8606,2.3376'."
    )
]
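
# The CaminoAITools class above also defines route(), which isn't exposed yet.
# A minimal (hypothetical) way to add it as a single-string tool, assuming the
# agent passes "start_lat,start_lon,end_lat,end_lon":
def _route_from_string(coords: str) -> str:
    start_lat, start_lon, end_lat, end_lon = map(float, coords.split(","))
    return str(camino.route(start_lat, start_lon, end_lat, end_lon))

tools.append(
    Tool(
        name="calculate_route",
        func=_route_from_string,
        description="Get a walking route between two points. Input must be 'start_lat,start_lon,end_lat,end_lon'."
    )
)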

# Create travel agent
travel_agent = initialize_agent(
    tools=tools,
    llm=ChatOpenAI(model="gpt-4"),
    agent=AgentType.OPENAI_FUNCTIONS,
    verbose=True
)

# Use agent
result = travel_agent.run("Plan a half-day walking tour of Paris starting at the Louvre")

Pricing

Per API call: $0.001
Free tier: 1,000 calls/month
Cost vs Google Places: 17x cheaper

Start Building Today

Give your AI agents location intelligence

1,000 free API calls every month • No credit card required