Vercel AI SDK

Use MCP tools with Vercel AI SDK

The Integrate SDK works with Vercel's AI SDK, exposing your connected integrations to AI models as MCP tools.

Installation

Install the Integrate SDK and Vercel AI SDK packages:

bun add integrate-sdk ai @ai-sdk/react
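
The chat page example further down also renders streamed markdown with Streamdown, so install that package as well if you plan to follow it:

bun add streamdown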

Setup

Setting up the Vercel AI integration requires three files, or five if you are not using a database:

1. Server Configuration

Create a server configuration file with your API key and the OAuth scopes for each integration:

// lib/integrate.ts
import { createMCPServer, githubIntegration } from "integrate-sdk/server";

export const { client: serverClient } = createMCPServer({
  apiKey: process.env.INTEGRATE_API_KEY,
  integrations: [
    githubIntegration({
      scopes: ["repo", "user"],
    }),
  ],
});
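
The apiKey above is read from process.env.INTEGRATE_API_KEY, so define it in your environment, for example in a .env.local file (placeholder value shown):

# .env.local
INTEGRATE_API_KEY=your-integrate-api-key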

2. Providers (only if no database)

If you're not using a database, create a providers component that injects tokens:

// app/providers.tsx
"use client";

import { client } from "integrate-sdk";
import { useIntegrateAI } from "integrate-sdk/react";

export function Providers({ children }: { children: React.ReactNode }) {
  useIntegrateAI(client);
  return <>{children}</>;
}

3. Layout (only if no database)

Wrap your app with the Providers component:

// app/layout.tsx
import { Providers } from "./providers";

export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  return (
    <html lang="en">
      <body>
        <Providers>{children}</Providers>
      </body>
    </html>
  );
}

4. OAuth Handler

Create a catch-all route that handles OAuth operations at app/api/integrate/[...all]/route.ts:

// app/api/integrate/[...all]/route.ts
import { serverClient } from "@/lib/integrate";
import { toNextJsHandler } from "integrate-sdk/server";

export const { POST, GET } = toNextJsHandler(serverClient);

5. Chat API Route

Create a chat route that converts the MCP tools to the Vercel AI SDK's tool format:

// app/api/chat/route.ts
import { serverClient } from "@/lib/integrate";
import { getVercelAITools } from "integrate-sdk/server";
import { convertToModelMessages, stepCountIs, streamText } from "ai";

export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = streamText({
    model: "openai/gpt-5-mini",
    messages: convertToModelMessages(messages),
    tools: await getVercelAITools(serverClient),
    stopWhen: stepCountIs(5),
  });

  return result.toUIMessageStreamResponse();
}
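
getVercelAITools resolves to the tool map that streamText expects, so you can also adjust it before handing it to the model. Below is a minimal sketch of that idea, assuming the resolved value is a plain object keyed by tool name and that GitHub tool names start with "github" (a naming assumption, not a documented guarantee):

// app/api/chat/route.ts (variation)
import { serverClient } from "@/lib/integrate";
import { getVercelAITools } from "integrate-sdk/server";
import { convertToModelMessages, stepCountIs, streamText } from "ai";

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Assumption: getVercelAITools resolves to a plain Record<string, Tool>.
  const allTools = await getVercelAITools(serverClient);

  // Keep only GitHub-related tools (hypothetical "github" name prefix).
  const githubTools = Object.fromEntries(
    Object.entries(allTools).filter(([name]) => name.startsWith("github"))
  );

  const result = streamText({
    model: "openai/gpt-5-mini",
    // A system prompt is optional but helps steer tool use.
    system: "You are a helpful assistant with access to the user's GitHub account.",
    messages: convertToModelMessages(messages),
    tools: githubTools,
    stopWhen: stepCountIs(5),
  });

  return result.toUIMessageStreamResponse();
}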

Usage

Use the Vercel AI SDK's useChat hook in any component:

// app/page.tsx
"use client";

import { useChat } from "@ai-sdk/react";
import { Streamdown } from "streamdown";
import { useState, useEffect } from "react";
import { client } from "integrate-sdk";

export default function ChatPage() {
  const { messages, sendMessage, status } = useChat();
  const [input, setInput] = useState("");

  const [githubAuthorized, setGithubAuthorized] = useState(false);

  const isLoading = status === "streaming";

  useEffect(() => {
    client.isAuthorized("github").then(setGithubAuthorized);
  }, []);

  async function handleGithubClick() {
    try {
      if (githubAuthorized) {
        await client.disconnectProvider("github");
      } else {
        await client.authorize("github");
      }
      setGithubAuthorized(await client.isAuthorized("github"));
    } catch (error) {
      console.error("Error:", error);
    }
  }

  return (
    <div className="flex flex-col h-screen">
      <div className="flex-1 overflow-y-auto p-4 space-y-4">
        {messages.map((message) => (
          <div
            key={message.id}
            className={message.role === "user" ? "text-right" : "text-left"}
          >
            <div className="inline-block max-w-2xl">
              <Streamdown
                isAnimating={isLoading && message.role === "assistant"}
              >
                {message.parts
                  .filter((part) => part.type === "text")
                  .map((part) => ("text" in part ? part.text : ""))
                  .join("")}
              </Streamdown>
            </div>
          </div>
        ))}
      </div>
      <form
        onSubmit={(e) => {
          e.preventDefault();
          if (input.trim()) {
            sendMessage({ text: input });
            setInput("");
          }
        }}
        className="p-4 border-t border-gray-500 space-y-2"
      >
        <input
          value={input}
          onChange={(e) => setInput(e.target.value)}
          placeholder="Ask me anything..."
          className="w-full px-4 py-2 border border-gray-500 rounded-lg"
          disabled={isLoading}
        />
        <div className="flex justify-between">
          <button
            type="button"
            onClick={handleGithubClick}
            className="px-4 py-2 bg-black text-white rounded-lg disabled:opacity-50"
          >
            {githubAuthorized ? "Disconnect" : "Connect"} GitHub
          </button>
          <button
            type="submit"
            disabled={isLoading || !input.trim()}
            className="px-4 py-2 bg-black text-white rounded-lg disabled:opacity-50"
          >
            Send
          </button>
        </div>
      </form>
    </div>
  );
}
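
The snippet above only treats the "streaming" status as loading. useChat also reports "submitted" while the request is in flight before the first token arrives, so you may want to widen the check:

// Disable the input for the pending request as well as the active stream.
const isLoading = status === "submitted" || status === "streaming";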

Next Steps