import asyncio
import os

from langchain.chat_models import init_chat_model
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.graph import MessagesState, START, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition
async def main() -> None:
    """Run a one-shot LangGraph ReAct-style agent backed by the xquik MCP server.

    Flow: connect to the MCP server over streamable HTTP, load its tools,
    build a two-node graph (LLM node <-> tool-execution node), then invoke
    it once with a fixed user prompt and print the final assistant reply.

    Raises whatever the MCP client / model transport raises on network or
    auth failure — no error handling is attempted at this level.
    """
    model = init_chat_model("anthropic:claude-sonnet-4-20250514")

    # Credentials belong in the environment, not in source control.
    # Falls back to the original placeholder so behavior is unchanged
    # when XQUIK_API_KEY is unset.
    api_key = os.environ.get("XQUIK_API_KEY", "xq_YOUR_KEY_HERE")

    client = MultiServerMCPClient({
        "xquik": {
            "transport": "streamable_http",
            "url": "https://xquik.com/mcp",
            "headers": {"x-api-key": api_key},
        },
    })
    tools = await client.get_tools()

    # Bind tools once; the original re-bound them on every node invocation.
    model_with_tools = model.bind_tools(tools)

    def call_model(state: MessagesState):
        """LLM node: answer (or emit tool calls) given the conversation so far."""
        return {"messages": model_with_tools.invoke(state["messages"])}

    builder = StateGraph(MessagesState)
    builder.add_node(call_model)       # registered under the name "call_model"
    builder.add_node(ToolNode(tools))  # registered under the name "tools"
    builder.add_edge(START, "call_model")
    # Route to "tools" when the LLM emitted tool calls, otherwise to END.
    builder.add_conditional_edges("call_model", tools_condition)
    builder.add_edge("tools", "call_model")  # loop back after tool execution
    graph = builder.compile()

    result = await graph.ainvoke(
        {"messages": [{"role": "user", "content": "Look up @elonmusk's profile"}]}
    )
    print(result["messages"][-1].content)
# Guard the entry point so importing this module does not fire a network call.
if __name__ == "__main__":
    asyncio.run(main())