LangChain LangServeOverview Integration with a LangServe server via Vercel AI SDK. Getting Started Create a Next.js projectnpx create-next-app@latest my-app cd my-appInstall @langchain/core, ai-sdk and @assistant-ui/reactnpm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @langchain/coreSetup a backend route under /api/chat@/app/api/chat/route.tsimport { RemoteRunnable } from "@langchain/core/runnables/remote"; import type { RunnableConfig } from "@langchain/core/runnables"; import { LangChainAdapter, type Message } from "ai"; export const maxDuration = 30; export async function POST(req: Request) { const { messages } = (await req.json()) as { messages: Message[] }; // TODO replace with your own langserve URL const remoteChain = new RemoteRunnable< { messages: Message[] }, string, RunnableConfig >({ url: "<YOUR_LANGSERVE_URL>", }); const stream = await remoteChain.stream({ messages, }); return LangChainAdapter.toDataStreamResponse(stream); }Define a MyRuntimeProvider component@/app/MyRuntimeProvider.tsx"use client"; import { useChat } from "ai/react"; import { AssistantRuntimeProvider } from "@assistant-ui/react"; import { useVercelUseChatRuntime } from "@assistant-ui/react-ai-sdk"; export function MyRuntimeProvider({ children, }: Readonly<{ children: React.ReactNode; }>) { const chat = useChat({ api: "/api/chat", unstable_AISDKInterop: true, }); const runtime = useVercelUseChatRuntime(chat); return ( <AssistantRuntimeProvider runtime={runtime}> {children} </AssistantRuntimeProvider> ); }Wrap your app in MyRuntimeProvider@/app/layout.tsximport type { ReactNode } from "react"; import { MyRuntimeProvider } from "@/app/MyRuntimeProvider"; export default function RootLayout({ children, }: Readonly<{ children: ReactNode; }>) { return ( <MyRuntimeProvider> <html lang="en"> <body>{children}</body> </html> </MyRuntimeProvider> ); }PreviousPart 3: Approval UINextLocalRuntime