LlamaIndex Integration - Location Intelligence Tools

Integrate Camino AI with LlamaIndex to add location intelligence to your RAG applications and AI agents. Build location-aware query engines and agentic workflows with natural language location search.

Camino AI plugs into LlamaIndex as a custom tool, so your RAG systems and agents can search locations, calculate routes, and add spatial context alongside your document knowledge.

Why LlamaIndex + Camino AI?

  • Enhanced RAG: Combine document knowledge with real-time location data
  • Location-Aware Agents: Build agents that reason about both documents and physical locations
  • Multi-Modal Context: Augment text embeddings with spatial information
  • Cost-Effective: 17x cheaper than Google Places API

Quick Start: Custom Tool

1. Install Dependencies

pip install llama-index llama-index-llms-openai requests
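
The code below reads your Camino AI key from the CAMINO_API_KEY environment variable, so set it before running the examples (the value below is a placeholder; you can also export the variable in your shell instead):

import os
os.environ["CAMINO_API_KEY"] = "your-camino-api-key"  # placeholder value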

2. Create Camino AI Tool

from typing import Optional

from llama_index.core.tools import FunctionTool
import requests
import os

CAMINO_API_KEY = os.getenv("CAMINO_API_KEY")

def search_locations(
    query: str,
    latitude: Optional[float] = None,
    longitude: Optional[float] = None,
    radius: int = 1000
) -> str:
    """
    Search for places and locations using natural language.
    
    Args:
        query (str): Natural language description of what to search for.
            Examples: "coffee shops", "hotels near Times Square", "family-friendly restaurants"
        latitude (float, optional): Center latitude for the search. If omitted, Camino AI infers the location from the query.
        longitude (float, optional): Center longitude for the search. If omitted, Camino AI infers the location from the query.
        radius (int, optional): Search radius in meters. Default 1000.
        
    Returns:
        str: Formatted string with location results including names, addresses, and AI summary
    """
    params = {
        "query": query,
        "radius": radius,
        "rank": True,
        "answer": True,
        "limit": 10
    }
    
    if latitude is not None and longitude is not None:
        params["lat"] = latitude
        params["lon"] = longitude
    
    headers = {"X-API-Key": CAMINO_API_KEY}
    
    try:
        response = requests.get(
            "https://api.getcamino.ai/query",
            params=params,
            headers=headers
        )
        response.raise_for_status()
        data = response.json()
        
        # Format for LLM consumption
        result = f"Found {len(data.get('results', []))} locations.\n\n"
        
        if 'answer' in data and data['answer']:
            result += f"Summary: {data['answer']}\n\n"
        
        result += "Top results:\n"
        for idx, place in enumerate(data.get('results', [])[:5], 1):
            name = place.get('name', 'Unknown')
            address = place.get('address', 'No address')
            distance = place.get('distance_km', 'N/A')
            result += f"{idx}. {name} - {address} ({distance}km away)\n"
        
        return result
        
    except Exception as e:
        return f"Error searching locations: {str(e)}"

# Create LlamaIndex tool
location_search_tool = FunctionTool.from_defaults(fn=search_locations)
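
You can sanity-check the tool before handing it to an agent (a quick sketch; the coordinates are central Manhattan and purely illustrative):

# Call the plain function to verify API access and response formatting
print(search_locations("coffee shops", latitude=40.7580, longitude=-73.9855, radius=500))

# Or go through the LlamaIndex wrapper, which is what the agent will call
print(location_search_tool.call(query="coffee shops near Times Square"))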

3. Use with LlamaIndex Agent

from llama_index.core.agent import ReActAgent
from llama_index.llms.openai import OpenAI

# Initialize LLM
llm = OpenAI(model="gpt-4", temperature=0)

# Create agent with location tool
agent = ReActAgent.from_tools(
    [location_search_tool],
    llm=llm,
    verbose=True
)

# Query the agent
response = agent.chat("Find me the best Italian restaurants in Manhattan")
print(response)

How it Works:
  1. User asks a question that requires location data
  2. LlamaIndex agent recognizes the need and calls the Camino AI tool
  3. Location results are returned and integrated into the agent's reasoning
  4. Agent provides a complete, context-aware response

Advanced: Multiple Tools

Add Route Calculation Tool

def calculate_route(
    start_lat: float,
    start_lon: float,
    end_lat: float,
    end_lon: float,
    mode: str = "car"
) -> str:
    """
    Calculate route and get turn-by-turn directions between two locations.
    
    Args:
        start_lat (float): Starting latitude
        start_lon (float): Starting longitude
        end_lat (float): Destination latitude
        end_lon (float): Destination longitude
        mode (str): Transport mode - 'car', 'bike', or 'foot'. Default 'car'.
        
    Returns:
        str: Route information with distance, duration, and directions
    """
    params = {
        "start_lat": start_lat,
        "start_lon": start_lon,
        "end_lat": end_lat,
        "end_lon": end_lon,
        "mode": mode
    }
    
    headers = {"X-API-Key": CAMINO_API_KEY}
    
    try:
        response = requests.get(
            "https://api.getcamino.ai/route",
            params=params,
            headers=headers
        )
        response.raise_for_status()
        data = response.json()
        
        result = f"Route ({mode}):\n"
        result += f"Distance: {data.get('distance_km', 'N/A')}km\n"
        result += f"Duration: {data.get('duration_minutes', 'N/A')} minutes\n\n"
        
        if 'directions' in data:
            result += "Key directions:\n"
            for idx, step in enumerate(data['directions'][:5], 1):
                result += f"{idx}. {step.get('instruction', '')}\n"
        
        return result
    except Exception as e:
        return f"Error calculating route: {str(e)}"

route_tool = FunctionTool.from_defaults(fn=calculate_route)
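
# Optional: sanity-check the route helper directly before registering it
# (illustrative coordinates, roughly Union Square to the Ferry Building in San Francisco)
print(calculate_route(37.7880, -122.4075, 37.7955, -122.3937, mode="foot"))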

# Create agent with both tools
agent = ReActAgent.from_tools(
    [location_search_tool, route_tool],
    llm=llm,
    verbose=True
)
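
With both tools registered, a single prompt can chain search and routing; the agent decides which tool to call and in what order (an illustrative query):

response = agent.chat(
    "Find a well-rated bakery near the Eiffel Tower, "
    "then give me walking directions to it from the Louvre."
)
print(response)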

RAG + Location Intelligence

Combine Documents with Location Data

from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.core.tools import QueryEngineTool, ToolMetadata

# Load your documents
documents = SimpleDirectoryReader("./data/travel_guides").load_data()
index = VectorStoreIndex.from_documents(documents)

# Create query engine tool from your docs
doc_query_engine = index.as_query_engine()
doc_tool = QueryEngineTool(
    query_engine=doc_query_engine,
    metadata=ToolMetadata(
        name="travel_guide_search",
        description="Search through travel guide documents for recommendations, tips, and general information about destinations."
    )
)

# Combine with Camino AI location tool
agent = ReActAgent.from_tools(
    [doc_tool, location_search_tool, route_tool],
    llm=llm,
    verbose=True
)

# Agent can now use both document knowledge and real-time location data
response = agent.chat(
    "According to our travel guides, what's recommended in Tokyo? "
    "Then find me actual hotels near those recommended areas."
)
print(response)

Custom Query Engine

Location-Aware Query Engine

from llama_index.core.query_engine import CustomQueryEngine
from llama_index.core.response_synthesizers import BaseSynthesizer, get_response_synthesizer
from llama_index.core.retrievers import VectorIndexRetriever

class LocationAwareQueryEngine(CustomQueryEngine):
    """Custom query engine that enhances responses with location data."""

    # CustomQueryEngine is a Pydantic model, so dependencies are declared as fields
    retriever: VectorIndexRetriever
    response_synthesizer: BaseSynthesizer
    camino_key: str
    
    def custom_query(self, query_str: str):
        # First, get relevant documents
        nodes = self.retriever.retrieve(query_str)
        
        # Extract location mentions from query
        # (In production, use NER or more sophisticated extraction)
        if "near" in query_str.lower() or "in" in query_str.lower():
            # Enhance with location data
            location_data = self._search_locations(query_str)
            nodes_text = "\n".join([n.node.get_content() for n in nodes])
            enhanced_context = f"{nodes_text}\n\nReal-time location data:\n{location_data}"
            
            # Create enhanced response
            from llama_index.core.schema import NodeWithScore, TextNode
            enhanced_node = NodeWithScore(
                node=TextNode(text=enhanced_context),
                score=1.0
            )
            nodes.append(enhanced_node)
        
        return self.response_synthesizer.synthesize(
            query=query_str,
            nodes=nodes
        )
    
    def _search_locations(self, query):
        # Call Camino AI
        params = {"query": query, "rank": True, "answer": True}
        headers = {"X-API-Key": self.camino_key}
        response = requests.get(
            "https://api.getcamino.ai/query",
            params=params,
            headers=headers
        )
        data = response.json()
        return data.get('answer', '')

# Use the custom engine
query_engine = LocationAwareQueryEngine(
    retriever=VectorIndexRetriever(index=index, similarity_top_k=3),
    response_synthesizer=get_response_synthesizer(),
    camino_key=CAMINO_API_KEY
)
response = query_engine.query("What are good hotels in San Francisco?")

Workflow Integration

Multi-Step Location Workflow

from llama_index.core.workflow import Workflow, StartEvent, StopEvent, step

class TravelPlanningWorkflow(Workflow):
    @step
    async def search_destinations(self, ev: StartEvent) -> StopEvent:
        query = ev.get("query")
        
        # Step 1: Search attractions
        attractions = search_locations(
            f"tourist attractions in {query}",
            latitude=ev.get("lat"),
            longitude=ev.get("lon")
        )
        
        # Step 2: Find nearby hotels
        hotels = search_locations(
            f"hotels near top attractions",
            latitude=ev.get("lat"),
            longitude=ev.get("lon")
        )
        
        # Step 3: Calculate routes
        # (simplified - in practice, parse coordinates from results)
        
        return StopEvent(result={
            "attractions": attractions,
            "hotels": hotels
        })

workflow = TravelPlanningWorkflow()
result = await workflow.run(query="Paris", lat=48.8566, lon=2.3522)
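
The top-level await above assumes an async environment such as a notebook; in a standalone script, drive the workflow with asyncio (a minimal sketch using the same inputs):

import asyncio

async def main():
    workflow = TravelPlanningWorkflow()
    result = await workflow.run(query="Paris", lat=48.8566, lon=2.3522)
    print(result["attractions"])
    print(result["hotels"])

asyncio.run(main())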

Use Cases

Travel Recommendation System

# Documents contain general travel advice
# Location tool provides real-time, specific recommendations
response = agent.chat(
    "I'm planning a family trip to London. "
    "What does our guide recommend, and find me family-friendly hotels."
)
# Agent uses both document tool and location tool

Real Estate Search

# Combine property documents with neighborhood data
response = agent.chat(
    "Tell me about properties in the Marina District SF, "
    "then find nearby schools and grocery stores."
)
# Agent retrieves property docs and searches real-time locations

Restaurant Finder with Reviews

# Documents have restaurant reviews
# Location tool finds current options
response = agent.chat(
    "Based on our reviews, which Italian restaurants in NYC are best? "
    "Find me their current locations and hours."
)

Best Practices

  • Tool Descriptions: Write clear descriptions so the agent knows when to use location tools vs document retrieval
  • Error Handling: Always handle API errors gracefully with fallback responses
  • Caching: Cache location results for frequently queried places (see the caching sketch after this list)
  • Combine Sources: Use both document knowledge (static) and location APIs (dynamic) for comprehensive answers
  • Context Windows: Keep location results concise to fit in context with document retrievals
  • Async Operations: Use async/await to run document and location queries in parallel (see the async sketch after this list)
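
A simple in-process cache around the search helper covers repeated queries (a minimal sketch assuming exact-match query strings; swap in Redis or similar for shared or persistent caching):

from functools import lru_cache
from typing import Optional

@lru_cache(maxsize=256)
def cached_search_locations(query: str, latitude: Optional[float] = None,
                            longitude: Optional[float] = None, radius: int = 1000) -> str:
    # Delegates to search_locations defined earlier; identical arguments
    # return the cached string without another API call.
    return search_locations(query, latitude, longitude, radius)

cached_location_tool = FunctionTool.from_defaults(fn=cached_search_locations)

For async operations, requests is blocking, so push location calls onto a thread while documents are retrieved asynchronously (a sketch assuming a retriever like the VectorIndexRetriever used earlier):

import asyncio

async def fetch_context(query: str, retriever):
    # Run the blocking location call and the async document retrieval concurrently
    location_task = asyncio.to_thread(search_locations, query)
    nodes_task = retriever.aretrieve(query)
    location_data, nodes = await asyncio.gather(location_task, nodes_task)
    return location_data, nodes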

Complete Example: Travel Agent

from llama_index.core.agent import ReActAgent
from llama_index.llms.openai import OpenAI
from llama_index.core.tools import FunctionTool, QueryEngineTool
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from typing import Optional
import requests
import os

class CaminoTravelAgent:
    def __init__(self, openai_key, camino_key, docs_path):
        os.environ["OPENAI_API_KEY"] = openai_key
        self.camino_key = camino_key
        
        # Load travel guide documents
        documents = SimpleDirectoryReader(docs_path).load_data()
        index = VectorStoreIndex.from_documents(documents)
        
        # Create document query tool
        query_engine = index.as_query_engine()
        doc_tool = QueryEngineTool.from_defaults(
            query_engine=query_engine,
            name="travel_guides",
            description="Search travel guides for general recommendations and advice"
        )
        
        # Create location tools
        location_tool = FunctionTool.from_defaults(
            fn=self._search_locations,
            name="search_locations",
            description="Find specific places like hotels, restaurants, attractions"
        )
        
        route_tool = FunctionTool.from_defaults(
            fn=self._calculate_route,
            name="calculate_route",
            description="Get directions between two locations"
        )
        
        # Create agent
        llm = OpenAI(model="gpt-4", temperature=0)
        self.agent = ReActAgent.from_tools(
            [doc_tool, location_tool, route_tool],
            llm=llm,
            verbose=True
        )
    
    def _search_locations(self, query: str, latitude: Optional[float] = None, longitude: Optional[float] = None) -> str:
        params = {"query": query, "rank": True, "answer": True}
        if latitude is not None and longitude is not None:
            params.update({"lat": latitude, "lon": longitude})
        
        response = requests.get(
            "https://api.getcamino.ai/query",
            params=params,
            headers={"X-API-Key": self.camino_key}
        )
        data = response.json()
        return data.get('answer', 'No results found')
    
    def _calculate_route(self, start_lat: float, start_lon: float, 
                        end_lat: float, end_lon: float, mode: str = "foot") -> str:
        params = {
            "start_lat": start_lat, "start_lon": start_lon,
            "end_lat": end_lat, "end_lon": end_lon, "mode": mode
        }
        response = requests.get(
            "https://api.getcamino.ai/route",
            params=params,
            headers={"X-API-Key": self.camino_key}
        )
        data = response.json()
        return f"Distance: {data.get('distance_km')}km, Duration: {data.get('duration_minutes')}min"
    
    def plan_trip(self, request):
        return self.agent.chat(request)

# Use the agent
agent = CaminoTravelAgent(
    openai_key="your-key",
    camino_key="your-camino-key",
    docs_path="./travel_guides"
)

response = agent.plan_trip(
    "I'm visiting Rome for 3 days. What does our guide recommend? "
    "Find me hotels near the Colosseum and popular restaurants."
)
print(response)

Pricing

  • Per API call: $0.001
  • Free tier: 1,000 calls/month
  • LlamaIndex: free (LLM usage billed separately)
  • Combined: ~$0.002-0.01 per query

Start Building Today

Give your AI agents location intelligence

1,000 free API calls every month • No credit card required