Integrating OpenAI API with Next.js
Introduction
This guide shows how to integrate the OpenAI API into a Next.js application to build intelligent features. It covers project setup, API key configuration, and implementing chat completions with the official OpenAI Node.js SDK.
Prerequisites
- Next.js 14+
- OpenAI API account and key
- Node.js >=18
Step 1: Install OpenAI SDK
npm install openai
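The command above uses npm; if the project uses yarn or pnpm instead, the equivalent commands are:
yarn add openai
pnpm add openai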
Step 2: Configure Environment Variables
Add to .env.local (keep this file out of version control; variables without the NEXT_PUBLIC_ prefix are only available on the server):
OPENAI_API_KEY=sk-your-api-key-here
Step 3: Create OpenAI Client
Create lib/openai.ts:
import OpenAI from 'openai';
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});
export default openai;
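If OPENAI_API_KEY is missing, the client above only fails once the first request is made. A minimal fail-fast variant of lib/openai.ts (an optional addition, not required by the steps below) could look like this:

import OpenAI from 'openai';

// Optional guard: surface a clear error as soon as this module is imported
// rather than when the first API call fails.
if (!process.env.OPENAI_API_KEY) {
  throw new Error('OPENAI_API_KEY is not set. Add it to .env.local.');
}

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

export default openai;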
Step 4: Create API Route
Create app/api/chat/route.ts:
import { NextRequest, NextResponse } from 'next/server';
import openai from '@/lib/openai';

export async function POST(req: NextRequest) {
  try {
    const { messages } = await req.json();

    const completion = await openai.chat.completions.create({
      model: "gpt-4",
      messages: messages,
      max_tokens: 150,
      temperature: 0.7,
    });

    return NextResponse.json({
      message: completion.choices[0]?.message?.content || "No response",
    });
  } catch (error) {
    console.error('OpenAI API error:', error);
    return NextResponse.json(
      { error: 'Failed to generate response' },
      { status: 500 }
    );
  }
}
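The handler above forwards whatever the client sends straight to OpenAI. A minimal validation sketch (validateMessages is a hypothetical helper, not part of the route above) that rejects malformed payloads with a 400 before calling the API might look like this:

// Hypothetical helper: returns an error response for malformed payloads, or null if valid.
function validateMessages(messages: unknown): NextResponse | null {
  const isValid =
    Array.isArray(messages) &&
    messages.length > 0 &&
    messages.every(
      (m) =>
        m &&
        typeof m.content === 'string' &&
        ['system', 'user', 'assistant'].includes(m.role)
    );
  if (!isValid) {
    return NextResponse.json({ error: 'Invalid messages payload' }, { status: 400 });
  }
  return null;
}

// Inside POST, after parsing the body and before calling openai.chat.completions.create:
// const invalid = validateMessages(messages);
// if (invalid) return invalid;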
Step 5: Create Chat Component
Create components/Chat.tsx:
'use client';

import { useState } from 'react';

interface Message {
  role: 'user' | 'assistant';
  content: string;
}

export default function Chat() {
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState('');
  const [loading, setLoading] = useState(false);

  const sendMessage = async () => {
    if (!input.trim()) return;

    const userMessage: Message = { role: 'user', content: input };
    const newMessages = [...messages, userMessage];
    setMessages(newMessages);
    setInput('');
    setLoading(true);

    try {
      const response = await fetch('/api/chat', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ messages: newMessages }),
      });

      const data = await response.json();

      if (data.error) {
        throw new Error(data.error);
      }

      setMessages([...newMessages, { role: 'assistant', content: data.message }]);
    } catch (error) {
      console.error('Chat error:', error);
      setMessages([...newMessages, {
        role: 'assistant',
        content: 'Sorry, I encountered an error. Please try again.'
      }]);
    } finally {
      setLoading(false);
    }
  };
  return (
    <div className="max-w-2xl mx-auto p-6">
      <div className="border rounded-lg h-96 overflow-y-auto p-4 mb-4">
        {messages.map((message, index) => (
          <div
            key={index}
            className={`mb-4 ${
              message.role === 'user' ? 'text-right' : 'text-left'
            }`}
          >
            <div
              className={`inline-block p-3 rounded-lg max-w-xs ${
                message.role === 'user'
                  ? 'bg-blue-500 text-white'
                  : 'bg-gray-200 text-gray-800'
              }`}
            >
              {message.content}
            </div>
          </div>
        ))}
        {loading && (
          <div className="text-left">
            <div className="inline-block p-3 rounded-lg bg-gray-200 text-gray-600">
              Thinking...
            </div>
          </div>
        )}
      </div>
      <div className="flex gap-2">
        <input
          type="text"
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={(e) => e.key === 'Enter' && sendMessage()}
          placeholder="Type your message..."
          className="flex-1 p-3 border rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500"
          disabled={loading}
        />
        <button
          onClick={sendMessage}
          disabled={loading || !input.trim()}
          className="px-6 py-3 bg-blue-500 text-white rounded-lg hover:bg-blue-600 disabled:bg-gray-300"
        >
          Send
        </button>
      </div>
    </div>
  );
}
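An optional refinement (not part of the component above) is keeping the message list scrolled to the newest entry. A minimal sketch using a ref rendered at the bottom of the scrollable container:

import { useEffect, useRef } from 'react';

// Inside Chat(): a ref rendered as the last child of the overflow-y-auto container,
// e.g. <div ref={bottomRef} /> just before its closing tag.
const bottomRef = useRef<HTMLDivElement>(null);

// Scroll the newest message (or the loading indicator) into view on every update.
useEffect(() => {
  bottomRef.current?.scrollIntoView({ behavior: 'smooth' });
}, [messages, loading]);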
Step 6: Use in Page
Create app/page.tsx:
import Chat from '@/components/Chat';

export default function Home() {
  return (
    <main className="container mx-auto py-12">
      <h1 className="text-4xl font-bold text-center mb-8">
        AI Chat with OpenAI
      </h1>
      <Chat />
    </main>
  );
}
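Optionally, the App Router lets this page export metadata to set the document title; it is not required for the chat to work:

// Optional: sets the <title> for this page via the App Router metadata API.
export const metadata = {
  title: 'AI Chat with OpenAI',
};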
Step 7: Advanced Features
Text Generation
For one-off prompts that don't need conversation history, a route handler can accept a single prompt string (the imports are the same as in the chat route):
export async function POST(req: NextRequest) {
  const { prompt } = await req.json();

  const completion = await openai.chat.completions.create({
    model: "gpt-4",
    messages: [{ role: "user", content: prompt }],
    max_tokens: 500,
  });

  return NextResponse.json({
    text: completion.choices[0]?.message?.content,
  });
}
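Calling this endpoint from the client follows the same pattern as the chat route. A sketch, assuming the handler above is saved as app/api/generate/route.ts (the path is an assumption, not specified in this guide):

// Hypothetical client-side call; '/api/generate' is assumed to match wherever
// the text-generation route above is mounted.
async function generateText(prompt: string): Promise<string | undefined> {
  const res = await fetch('/api/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ prompt }),
  });
  const data = await res.json();
  return data.text;
}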
Streaming Responses
To send tokens to the client as they are generated, pass stream: true and forward each chunk through a ReadableStream:
export async function POST(req: NextRequest) {
  const { messages } = await req.json();

  const stream = await openai.chat.completions.create({
    model: "gpt-4",
    messages,
    stream: true,
  });

  const encoder = new TextEncoder();
  const readableStream = new ReadableStream({
    async start(controller) {
      for await (const chunk of stream) {
        const content = chunk.choices[0]?.delta?.content || '';
        controller.enqueue(encoder.encode(`data: ${content}\n\n`));
      }
      controller.close();
    },
  });
  return new Response(readableStream, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
    },
  });
}
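On the client, the stream can be read incrementally with fetch and a ReadableStream reader. A minimal sketch, assuming the streaming handler above replaces the chat route at /api/chat and keeps the data: framing (streamChat and onToken are hypothetical names):

// Hypothetical helper: reads the streamed response and passes each token to onToken.
async function streamChat(
  messages: { role: string; content: string }[],
  onToken: (token: string) => void
) {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    // Simplified parsing: assumes each read contains whole `data: ...\n\n` frames.
    const text = decoder.decode(value, { stream: true });
    for (const line of text.split('\n\n')) {
      if (line.startsWith('data: ')) {
        onToken(line.slice('data: '.length));
      }
    }
  }
}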
Summary
Integrating OpenAI with Next.js enables powerful AI features such as chat completions, text generation, and streaming responses. Pair the official OpenAI SDK with proper error handling and rate limiting before shipping to production.
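Rate limiting is mentioned above but not covered in the steps. One minimal sketch is an in-memory fixed-window limiter inside the API route (the limits are hypothetical, and in-memory state is not shared across serverless instances, so a store such as Redis is more appropriate in production):

// Hypothetical fixed-window limiter: at most 10 requests per IP per minute.
const WINDOW_MS = 60_000;
const MAX_REQUESTS = 10;
const hits = new Map<string, { count: number; windowStart: number }>();

function isRateLimited(ip: string): boolean {
  const now = Date.now();
  const entry = hits.get(ip);

  if (!entry || now - entry.windowStart > WINDOW_MS) {
    hits.set(ip, { count: 1, windowStart: now });
    return false;
  }

  entry.count += 1;
  return entry.count > MAX_REQUESTS;
}

// Inside POST, before calling OpenAI:
// const ip = req.headers.get('x-forwarded-for') ?? 'unknown';
// if (isRateLimited(ip)) {
//   return NextResponse.json({ error: 'Too many requests' }, { status: 429 });
// }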