Build an AI Chatbot with Next.js 15 and OpenAI — Full Stack Guide 2026

Sanjeev Sharma
4 min read

Advertisement

Build a Production AI Chatbot with Next.js 15 + OpenAI

In this guide, you'll build a full-featured AI chatbot app from scratch — streaming responses, persistent chat history, and a polished UI.

Project Setup

npx create-next-app@latest ai-chatbot --typescript --tailwind --app
cd ai-chatbot
npm install openai ai @ai-sdk/openai

API Route: Streaming Chat Endpoint

// app/api/chat/route.ts
import { openai } from '@ai-sdk/openai'
import { streamText, Message } from 'ai'

export const runtime = 'edge'
export const maxDuration = 30

export async function POST(req: Request) {
  // Incoming payload: the full conversation so far, oldest message first.
  const payload: { messages: Message[] } = await req.json()

  // Stream GPT-4o tokens back to the client as they are produced rather
  // than waiting for the full completion.
  const result = streamText({
    model: openai('gpt-4o'),
    system: `You are a helpful assistant. Be concise, friendly, and accurate.
    Format code blocks with proper syntax highlighting markers.
    Today's date is ${new Date().toDateString()}.`,
    messages: payload.messages,
    maxTokens: 1000,
  })

  // Wire the token stream into the data-stream protocol that useChat consumes.
  return result.toDataStreamResponse()
}

Chat UI Component

// components/ChatMessage.tsx
import { Message } from 'ai'
import ReactMarkdown from 'react-markdown'
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'
import { oneDark } from 'react-syntax-highlighter/dist/esm/styles/prism'

export function ChatMessage({ message }: { message: Message }) {
  // User bubbles sit on the right in blue; assistant bubbles on the left in gray.
  const fromUser = message.role === 'user'

  return (
    <div className={`flex ${fromUser ? 'justify-end' : 'justify-start'} mb-4`}>
      <div
        className={`max-w-[80%] rounded-2xl px-4 py-3 ${
          fromUser
            ? 'bg-blue-600 text-white'
            : 'bg-gray-100 dark:bg-gray-800 text-gray-900 dark:text-gray-100'
        }`}
      >
        <ReactMarkdown
          components={{
            // Fenced blocks carry a `language-xxx` class from markdown; those get
            // full syntax highlighting. Anything else renders as inline code.
            code({ className, children }) {
              const match = /language-(\w+)/.exec(className || '')
              if (match) {
                return (
                  <SyntaxHighlighter style={oneDark} language={match[1]} PreTag="div">
                    {String(children).replace(/\n$/, '')}
                  </SyntaxHighlighter>
                )
              }
              return (
                <code className="bg-gray-200 dark:bg-gray-700 px-1 rounded text-sm">
                  {children}
                </code>
              )
            },
          }}
        >
          {message.content}
        </ReactMarkdown>
      </div>
    </div>
  )
}

Main Chat Page

// app/page.tsx
'use client'

import { useChat } from 'ai/react'
import { ChatMessage } from '@/components/ChatMessage'
import { useEffect, useRef } from 'react'

export default function ChatPage() {
  const { messages, input, handleInputChange, handleSubmit, isLoading, error } = useChat({
    api: '/api/chat',
    onError: (err) => console.error('Chat error:', err),
  })
  const messagesEndRef = useRef<HTMLDivElement>(null)

  useEffect(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
  }, [messages])

  return (
    <div className="flex flex-col h-screen bg-white dark:bg-gray-900">
      {/* Header */}
      <header className="border-b border-gray-200 dark:border-gray-700 p-4">
        <h1 className="text-xl font-bold text-gray-900 dark:text-white">AI Assistant</h1>
        <p className="text-sm text-gray-500">Powered by GPT-4o</p>
      </header>

      {/* Messages */}
      <div className="flex-1 overflow-y-auto p-4">
        {messages.length === 0 && (
          <div className="text-center text-gray-400 mt-20">
            <p className="text-2xl mb-2">👋 How can I help you today?</p>
            <p>Ask me anything about coding, technology, or anything else!</p>
          </div>
        )}
        {messages.map((message) => (
          <ChatMessage key={message.id} message={message} />
        ))}
        {isLoading && (
          <div className="flex justify-start mb-4">
            <div className="bg-gray-100 dark:bg-gray-800 rounded-2xl px-4 py-3">
              <div className="flex space-x-1">
                <div className="w-2 h-2 bg-gray-400 rounded-full animate-bounce" />
                <div className="w-2 h-2 bg-gray-400 rounded-full animate-bounce delay-100" />
                <div className="w-2 h-2 bg-gray-400 rounded-full animate-bounce delay-200" />
              </div>
            </div>
          </div>
        )}
        {error && (
          <div className="text-red-500 text-center p-2">Error: {error.message}</div>
        )}
        <div ref={messagesEndRef} />
      </div>

      {/* Input */}
      <form onSubmit={handleSubmit} className="border-t border-gray-200 dark:border-gray-700 p-4">
        <div className="flex gap-2 max-w-4xl mx-auto">
          <input
            value={input}
            onChange={handleInputChange}
            placeholder="Type a message..."
            className="flex-1 border border-gray-300 dark:border-gray-600 rounded-xl px-4 py-3
                       bg-white dark:bg-gray-800 text-gray-900 dark:text-white
                       focus:outline-none focus:ring-2 focus:ring-blue-500"
            disabled={isLoading}
          />
          <button
            type="submit"
            disabled={isLoading || !input.trim()}
            className="bg-blue-600 hover:bg-blue-700 disabled:opacity-50 text-white
                       rounded-xl px-6 py-3 font-medium transition-colors"
          >
            Send
          </button>
        </div>
      </form>
    </div>
  )
}

Add Persistent Chat History (localStorage)

// hooks/useChatHistory.ts
import { useState, useEffect } from 'react'
import { Message } from 'ai'

/**
 * Persists a chat's message list in localStorage under `chat-<chatId>`.
 *
 * Returns the loaded history plus save/clear helpers. Intended for client
 * components only — localStorage is touched inside useEffect and the
 * returned callbacks, never during render.
 */
export function useChatHistory(chatId: string) {
  const [history, setHistory] = useState<Message[]>([])

  // Reload whenever the chat id changes.
  useEffect(() => {
    const saved = localStorage.getItem(`chat-${chatId}`)
    if (!saved) return
    try {
      // localStorage contents can be stale, truncated, or hand-edited —
      // never trust JSON.parse on it without a guard.
      const parsed: unknown = JSON.parse(saved)
      if (Array.isArray(parsed)) {
        setHistory(parsed as Message[])
      }
    } catch {
      // Corrupt entry: remove it so the next save starts from a clean slot.
      localStorage.removeItem(`chat-${chatId}`)
    }
  }, [chatId])

  // Write the full message list and mirror it into state.
  const saveHistory = (messages: Message[]) => {
    localStorage.setItem(`chat-${chatId}`, JSON.stringify(messages))
    setHistory(messages)
  }

  // Drop both the persisted copy and the in-memory copy.
  const clearHistory = () => {
    localStorage.removeItem(`chat-${chatId}`)
    setHistory([])
  }

  return { history, saveHistory, clearHistory }
}

Deploy to Vercel

npm run build  # test build locally
vercel deploy  # deploy

# Set environment variable:
# OPENAI_API_KEY=sk-...

The Vercel AI SDK's streamText with runtime = 'edge' serves requests from Vercel's edge network close to the user, keeping first-token latency low — often well under a few hundred milliseconds.


Rate Limiting (Production Must-Have)

// middleware.ts
import { Ratelimit } from '@upstash/ratelimit'
import { Redis } from '@upstash/redis'
import { NextResponse } from 'next/server'

// One limiter instance shared across requests in this runtime instance;
// credentials come from UPSTASH_REDIS_REST_URL / UPSTASH_REDIS_REST_TOKEN.
const ratelimit = new Ratelimit({
  redis: Redis.fromEnv(),
  limiter: Ratelimit.slidingWindow(10, '1m'), // 10 requests per minute
})

/**
 * Per-IP rate limiting in front of the chat endpoint.
 *
 * `x-forwarded-for` may hold a comma-separated proxy chain
 * ("client, proxy1, proxy2"); the first entry is the originating client,
 * so limit on that — using the raw header would give the same client a
 * fresh bucket for every distinct proxy path. `||` (not `??`) also falls
 * back when the header is present but empty.
 */
export async function middleware(request: Request) {
  const forwarded = request.headers.get('x-forwarded-for')
  const ip = forwarded?.split(',')[0]?.trim() || '127.0.0.1'

  const { success } = await ratelimit.limit(ip)

  if (!success) {
    return NextResponse.json({ error: 'Too many requests' }, { status: 429 })
  }
  // Returning nothing lets the request continue to the matched route.
}

export const config = { matcher: '/api/chat' }

Advertisement

Sanjeev Sharma

Written by

Sanjeev Sharma

Full Stack Engineer · E-mopro