Skip to content

LangGraph Integration

Use StrataRouter for conditional edge routing in LangGraph state-machine graphs.

Installation

pip install "stratarouter[langchain]" langgraph

Overview

LangGraph builds AI workflows as directed graphs. StrataRouter replaces hand-written if/elif conditional edges with semantic routing — letting you dispatch to the right node based on what the user actually means, not just keywords.

User Input
[Router Node] ──────semantic routing──────▶ [billing_node]
                                       └──▶ [support_node]
                                       └──▶ [sales_node]

Quick Start

from langgraph.graph import StateGraph, END
from stratarouter import Router, Route
from stratarouter.integrations.langgraph import StrataRouterNode
from typing import TypedDict

# Define state
# Shared graph state: the user query, the router's decision, and the reply.
State = TypedDict("State", {"query": str, "route": str, "response": str})

# Build the router: two routes, each with example utterances (semantic
# anchors) and keywords (presumably exact-match boosts — confirm against
# the stratarouter docs). dimension=384 must match the embedding size.
router = Router(dimension=384)

billing = Route("billing")
billing.examples = ["invoice", "refund", "payment failed"]
billing.keywords = ["invoice", "payment", "refund", "billing"]
router.add_route(billing)

support = Route("support")
support.examples = ["app crash", "can't login", "bug report"]
support.keywords = ["bug", "crash", "error", "broken"]
router.add_route(support)

# Called once, after all routes are added (ordering shown throughout this doc).
router.build_index()

# Node that reads state["query"], routes it, and writes the chosen
# route id into state["route"].
router_node = StrataRouterNode(router, state_key="query", output_key="route")

# Define handler nodes
def billing_node(state: State) -> State:
    """Handle a billing-routed query and record the reply in a new state."""
    answered = dict(state)
    answered["response"] = f"[Billing] Handling: {state['query']}"
    return answered

def support_node(state: State) -> State:
    """Handle a support-routed query and record the reply in a new state."""
    answered = dict(state)
    answered["response"] = f"[Support] Handling: {state['query']}"
    return answered

# Routing function for conditional edges
def route_query(state: State) -> str:
    """Return the route id the router node wrote, as the conditional-edge key."""
    destination: str = state["route"]
    return destination

# Build graph: router runs first, then fans out to the matching handler.
graph = StateGraph(State)
graph.add_node("router", router_node)
graph.add_node("billing", billing_node)
graph.add_node("support", support_node)

graph.set_entry_point("router")
# Map route_query's return value onto the name of the node to run next.
graph.add_conditional_edges("router", route_query, {
    "billing": "billing",
    "support": "support",
})
graph.add_edge("billing", END)
graph.add_edge("support", END)

app = graph.compile()

# Run: seed all state keys; the router node fills "route" and the
# selected handler fills "response".
result = app.invoke({"query": "Where's my invoice?", "route": "", "response": ""})
print(result["response"])  # [Billing] Handling: Where's my invoice?

Confidence-Based Routing

Handle low-confidence situations gracefully with a fallback node:

# Same graph state as the Quick Start, extended with the router's
# confidence score so edges can branch on it.
State = TypedDict(
    "State",
    {"query": str, "route": str, "confidence": float, "response": str},
)

# Router node that also records its confidence score in the state,
# so the routing function below can divert low-confidence queries.
router_node = StrataRouterNode(
    router,
    state_key="query",
    output_key="route",
    confidence_key="confidence"   # writes confidence to state
)

def route_query(state: State) -> str:
    """Pick the next node, diverting to clarification on low confidence."""
    # A score under 0.5 means the router is not sure enough to commit.
    return "clarify" if state["confidence"] < 0.5 else state["route"]

def clarify_node(state: State) -> State:
    """Ask the user to restate an ambiguous request."""
    updated = dict(state)
    updated["response"] = "Could you clarify what you need help with?"
    return updated

graph.add_node("clarify", clarify_node)
# "clarify" is a synthetic key produced by route_query on low confidence —
# it is not one of the router's route ids.
graph.add_conditional_edges("router", route_query, {
    "billing": "billing",
    "support": "support",
    "clarify": "clarify",
})

Multi-Step Workflows

Use StrataRouter at multiple points in a complex workflow:

# State for the two-stage workflow: each router writes its own key.
WorkflowState = TypedDict(
    "WorkflowState",
    {
        "query": str,       # raw user input
        "intent": str,      # first router: billing / support
        "complexity": str,  # second router: simple / complex
        "response": str,    # final handler output
    },
)

# First router: intent classification (Route built via constructor kwargs,
# equivalent to the attribute-assignment style used in the Quick Start).
intent_router = Router(dimension=384)
intent_router.add_route(Route("billing", examples=["invoice", "refund"]))
intent_router.add_route(Route("support", examples=["bug", "crash"]))
intent_router.build_index()

# Second router: complexity classification of the same query.
complexity_router = Router(dimension=384)
complexity_router.add_route(Route("simple", examples=["quick question", "lookup"]))
complexity_router.add_route(Route("complex", examples=["escalation", "investigation"]))
complexity_router.build_index()

# One routing node per classification step; each writes to its own state key.
intent_node     = StrataRouterNode(intent_router, state_key="query", output_key="intent")
complexity_node = StrataRouterNode(complexity_router, state_key="query", output_key="complexity")

graph = StateGraph(WorkflowState)
graph.add_node("detect_intent", intent_node)
graph.add_node("detect_complexity", complexity_node)
# NOTE(review): handle_simple_billing & co. are assumed to be defined
# elsewhere — they are not shown in this snippet.
graph.add_node("simple_billing",  handle_simple_billing)
graph.add_node("complex_billing", handle_complex_billing)
graph.add_node("simple_support",  handle_simple_support)
graph.add_node("complex_support", handle_complex_support)

graph.set_entry_point("detect_intent")
graph.add_edge("detect_intent", "detect_complexity")
# Branch on the combined "<intent>_<complexity>" label written by the
# two router nodes above.
graph.add_conditional_edges("detect_complexity",
    lambda s: f"{s['intent']}_{s['complexity']}",
    {
        "billing_simple":  "simple_billing",
        "billing_complex": "complex_billing",
        "support_simple":  "simple_support",
        "support_complex": "complex_support",
    }
)

Human-in-the-Loop Routing

Combine StrataRouter with LangGraph's interrupt mechanism. Note that interrupts only take effect when the graph is compiled with a checkpointer (which is why MemorySaver is imported below):

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, END
# Fix: `interrupt` is exported from langgraph.types, not langgraph.graph —
# the original import line raises ImportError.
from langgraph.types import interrupt

def router_with_review(state: State) -> State:
    """Route the query, pausing for human confirmation when confidence is low.

    NOTE(review): `embed` is not defined in this snippet — presumably it
    returns the query's embedding vector; confirm against the real setup.
    """
    route_result = router.route(state["query"], embed(state["query"]))

    if route_result.confidence < 0.7:
        # Pause and ask human for confirmation. interrupt() suspends the
        # graph and resumes with the human's choice (a checkpointer such as
        # the imported MemorySaver is typically required — confirm).
        human_choice = interrupt({
            "question": f"Route to '{route_result.route_id}'?",
            "confidence": route_result.confidence,
            "options": ["billing", "support", "sales"]
        })
        return {**state, "route": human_choice}

    return {**state, "route": route_result.route_id}

Next Steps