Designing Advanced Chat UX with OpenAI Streaming

Introduction

Streaming chat responses improves perceived performance: instead of making users wait for the full completion, the UI renders tokens as the model generates them. In this post we use OpenAI’s streaming API to build a Next.js route that forwards chunks to the browser and a React component that updates the assistant’s message incrementally.

Prerequisites

You need a Next.js project using the App Router, the openai npm package (npm install openai), and an OPENAI_API_KEY set in your environment (for example in .env.local).

Step 1: API Route for Streaming

Create app/api/chat/stream/route.ts:

import { NextRequest } from 'next/server';
import OpenAI from 'openai';

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY! });

export async function POST(req: NextRequest) {
  const { messages } = await req.json();

  // Request a streamed completion; `response` is an async iterable of chunks
  // rather than a single completion object.
  const response = await openai.chat.completions.create({
    model: 'gpt-4o',
    messages,
    stream: true,
  });

  // Re-emit each delta as plain UTF-8 text so the browser can consume it with
  // a standard ReadableStream reader.
  const encoder = new TextEncoder();
  const stream = new ReadableStream({
    async start(controller) {
      try {
        for await (const chunk of response) {
          const content = chunk.choices[0]?.delta?.content || '';
          controller.enqueue(encoder.encode(content));
        }
        controller.close();
      } catch (err) {
        // Surface upstream errors to the client instead of leaving the stream hanging.
        controller.error(err);
      }
    },
  });

  return new Response(stream, {
    headers: { 'Content-Type': 'text/plain; charset=utf-8' },
  });
}
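
Before wiring up the UI, you can sanity-check the route with a short Node script (Node 18+ for built-in fetch). This is just a quick test sketch: it assumes your dev server is running at http://localhost:3000 and uses a made-up prompt, so adjust both to your setup.

// stream-check.ts — prints tokens to the terminal as they arrive.
async function main() {
  const res = await fetch('http://localhost:3000/api/chat/stream', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages: [{ role: 'user', content: 'Say hello in five words.' }] }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    // Tokens print incrementally rather than all at once.
    process.stdout.write(decoder.decode(value, { stream: true }));
  }
}

main();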

Step 2: Client-side Streaming

Create components/StreamingChat.tsx:

'use client';

import { useState } from 'react';

export default function StreamingChat() {
  const [messages, setMessages] = useState<string[]>([]);
  const [input, setInput] = useState('');

  const sendMessage = async () => {
    const userText = input.trim();
    if (!userText) return;
    setInput('');
    // Append the user message plus an assistant placeholder that we will
    // overwrite as streamed chunks arrive.
    setMessages(prev => [...prev, `User: ${userText}`, 'Assistant: ']);

    // Note: only the latest user turn is sent here; include prior messages
    // if the model should see the full conversation.
    const res = await fetch('/api/chat/stream', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ messages: [{ role: 'user', content: userText }] }),
    });

    const reader = res.body!.getReader();
    const decoder = new TextDecoder();
    let assistantText = '';

    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      // `stream: true` keeps multi-byte characters intact across chunk boundaries.
      assistantText += decoder.decode(value, { stream: true });
      // Replace the assistant placeholder (always the last entry) with the
      // text received so far.
      setMessages(prev => [...prev.slice(0, -1), `Assistant: ${assistantText}`]);
    }
  };

  return (
    <div>
      <div className="chat-window h-64 overflow-y-auto p-4 bg-gray-100 rounded">
        {messages.map((msg, idx) => (
          <div key={idx} className="mb-2 whitespace-pre-wrap">{msg}</div>
        ))}
      </div>
      <div className="mt-4 flex">
        <input
          className="flex-1 p-2 border rounded"
          value={input}
          onChange={e => setInput(e.target.value)}
        />
        <button onClick={sendMessage} className="ml-2 px-4 bg-blue-500 text-white rounded">
          Send
        </button>
      </div>
    </div>
  );
}
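
A small refinement that pairs well with streaming, though it is not part of the component above, is keeping the chat window pinned to the newest content as chunks arrive. A minimal sketch, assuming the same scrollable container:

// Add inside StreamingChat (requires `useEffect` and `useRef` from 'react'):
const chatRef = useRef<HTMLDivElement>(null);

useEffect(() => {
  // Scroll to the bottom whenever a new chunk updates the message list.
  chatRef.current?.scrollTo({ top: chatRef.current.scrollHeight });
}, [messages]);

// Then attach the ref to the scrollable container:
// <div ref={chatRef} className="chat-window h-64 overflow-y-auto p-4 bg-gray-100 rounded">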

Summary

Streaming enhances chat UX by rendering AI responses as they arrive, reducing wait times and engaging users with real-time feedback.
