ChatGPT bot using Next.js + Tailwind CSS

tzztson
4 min readMar 3, 2023

--

Photo by D koi on Unsplash

As all my friends know, I am very lazy. So I haven’t done anything these days.

When I heard about ChatGPT, I thought it might help me code more easily and comfortably.

I’m not sure how it will help me, but it was surprisingly easy to make. 😁

Let’s see how I made ChatGPT.

After installing Tailwind CSS with Next.js, let's build the main chat feature.

Please replace the content of index.tsx with the code below.

import Head from 'next/head'
import { useState, useRef, useEffect } from 'react'

/**
 * Chat page: renders the conversation history, a text input, and a Send
 * button. Messages are POSTed to /api/generate and the streamed reply is
 * accumulated into `botChat`.
 */
export default function Home() {
  // Scrollable chat-history container, used for auto-scrolling to the bottom.
  const scrollContainer = useRef<HTMLDivElement>(null);

  const [messageText, setMessageText] = useState('');
  const [isLoading, setIsLoading] = useState(false);

  // Parallel arrays: userChat[i] is the user's i-th message, botChat[i] its reply.
  const [userChat, setUserChat] = useState<string[]>([]);
  const [botChat, setBotChat] = useState<string[]>([]);

  /**
   * Send `prompt` to the /api/generate edge function and read the streamed
   * reply to completion, then append it to the bot side of the conversation.
   */
  const botResponse = async (prompt: string) => {
    setIsLoading(true);
    try {
      const response = await fetch('/api/generate', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({ messageText: prompt }),
      });
      console.log('Edge function returned.');

      if (!response.ok) {
        throw new Error(response.statusText);
      }

      // This data is a ReadableStream
      const data = response.body;
      if (!data) {
        return;
      }

      const reader = data.getReader();
      const decoder = new TextDecoder();
      let done = false;
      let botReply = '';

      while (!done) {
        const { value, done: doneReading } = await reader.read();
        done = doneReading;
        // stream: true keeps multi-byte characters split across chunks intact.
        botReply += decoder.decode(value, { stream: true });
      }
      console.log(botReply);
      // Functional update: avoids dropping replies when this closure holds a
      // stale `botChat` (the original `[...botChat, botReply]` had that bug).
      setBotChat((prev) => [...prev, botReply]);
    } catch (err) {
      // Surface the failure instead of leaving the UI silently stuck.
      console.error(err);
    } finally {
      // Always clear the spinner, even when the request throws.
      setIsLoading(false);
    }
  };

  // Smoothly scroll the chat container to its bottom; no-op before mount.
  const handleScroll = (el: HTMLElement | null) => {
    el?.scrollTo({
      top: el.scrollHeight,
      left: 0,
      behavior: 'smooth',
    });
  };

  // Validate, record the user message, fire the request, and clear the input.
  const sendMessage = () => {
    // Don't hit the API for empty/whitespace-only input (the original did).
    if (messageText.trim().length === 0) {
      return;
    }
    setUserChat((prev) => [...prev, messageText]);
    void botResponse(messageText);
    setMessageText('');
  };

  // Enter sends the message; Shift+Enter inserts a newline as usual.
  const handleEnterKey = (e: { key: string; shiftKey: boolean; preventDefault: () => void }) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      // Prevent the newline from being inserted into the textarea.
      e.preventDefault();
      sendMessage();
    }
  };

  // Keep the newest message in view whenever the conversation grows.
  useEffect(() => {
    handleScroll(scrollContainer.current);
  }, [userChat, botChat]);

  return (
    <>
      <Head>
        <title>ChatGPT BOT</title>
        <meta name="description" content="Generated by create next app" />
        <meta name="viewport" content="width=device-width, initial-scale=1" />
        <link rel="icon" href="/favicon.ico" />
      </Head>
      <main>
        <h1 className='py-4 text-2xl font-bold text-sky-400 text-center pt-12'>Next.js With ChatGPT</h1>
        <p className='py-4 text-xl font-bold text-sky-400 text-center'>If you discover any issue, please feel free to contact me.</p>
        <div className='bg-sky-100'>
          <div className='container mx-auto px-12 max-sm:px-6 py-6 overflow-auto h-[72vh] chat-container' ref={scrollContainer}>
            {userChat.map((ele, key) => {
              return (
                <div key={`blockchat-${key}`}>
                  <div className='flex flex-col gap-2 items-end justify-center'>
                    <div className='bg-[#efffde] rounded-2xl px-6 py-2 max-w-[50%] break-words'>{ele}</div>
                  </div>
                  {botChat[key] && <div className='flex flex-col gap-2 items-start justify-center break-words'>
                    <div className='bg-white rounded-2xl px-6 py-2 max-w-[50%]'>{botChat[key]}</div>
                  </div>}
                </div>
              )
            })}
            {isLoading && <div className="lds-ellipsis"><div></div><div></div><div></div><div></div></div>}
          </div>
        </div>

        <div className='container mx-auto px-12 max-sm:px-2 flex justify-center '>
          <div className="relative w-1/2 flex items-start py-6 max-xl:w-full flex justify-center max-md:flex-col max-md:items-center gap-4">
            <textarea value={messageText} onChange={e => setMessageText(e.target.value)} onKeyDown={handleEnterKey}
              className="outline-none bg-sky-50 border border-sky-300 text-sky-900 w-full h-14 px-6 py-3"
              placeholder="PLEASE TYPE YOUR TEXT HERE ..." />
            <button className='bg-sky-500 rounded-full text-white text-3xl font-black px-6 py-2 active:translate-y-1' onClick={sendMessage}>
              Send
            </button>
          </div>

        </div>
      </main>
    </>
  )
}

Next, let's create generate.ts in the api folder.

import { OpenAIStream, OpenAIStreamPayload } from "../../utils/OpenAIStream";

// Shape of the JSON body sent by the client.
type RequestData = {
  messageText: string;
};

// Fail fast at module load if the API key is not configured.
if (!process.env.OPENAI_API_KEY) {
  throw new Error("Missing env var from OpenAI");
}

// Run as an Edge function so the completion can be streamed to the client.
export const config = {
  runtime: "edge",
};

/**
 * POST /api/generate — forwards the user's message to the OpenAI completions
 * endpoint and streams the generated text back to the browser.
 *
 * NOTE(review): the original version accumulated every request's text into a
 * module-level `message_junk` string. Edge module state is shared across ALL
 * requests in the isolate, so that leaked one user's messages into other
 * users' prompts and grew without bound. Each request now sends only its own
 * message; conversation history, if desired, should be supplied per request
 * by the client.
 */
const handler = async (req: Request): Promise<Response> => {
  const { messageText } = (await req.json()) as RequestData;

  // Validate before doing any work with the prompt.
  if (!messageText) {
    return new Response("No prompt in the request", { status: 400 });
  }

  const payload: OpenAIStreamPayload = {
    model: "text-davinci-003",
    prompt: messageText,
    temperature: 0.7,
    top_p: 1,
    frequency_penalty: 0,
    presence_penalty: 0,
    max_tokens: 1000,
    stream: true,
    n: 1,
  };

  const stream = await OpenAIStream(payload);
  return new Response(stream);
};

export default handler;

Finally, let's create OpenAIStream.ts in the utils folder.

import {
createParser,
ParsedEvent,
ReconnectInterval,
} from "eventsource-parser";

/**
 * Request body for the OpenAI `/v1/completions` endpoint, as sent by
 * {@link OpenAIStream}. Field names mirror the OpenAI API exactly.
 */
export interface OpenAIStreamPayload {
  /** Completion model name, e.g. "text-davinci-003". */
  model: string;
  /** The text prompt to complete. */
  prompt: string;
  /** Sampling temperature; higher values produce more random output. */
  temperature: number;
  /** Nucleus-sampling cutoff. */
  top_p: number;
  /** Penalty for repeating tokens already present in the output. */
  frequency_penalty: number;
  /** Penalty for reusing tokens that have appeared at all. */
  presence_penalty: number;
  /** Upper bound on generated tokens. */
  max_tokens: number;
  /** Must be true here — the caller consumes server-sent events. */
  stream: boolean;
  /** Number of completions to generate. */
  n: number;
}

/**
 * Calls the OpenAI completions endpoint with `stream: true` and converts the
 * server-sent-event response into a plain-text ReadableStream of completion
 * tokens, suitable for piping straight to the browser.
 *
 * @param payload Request body for /v1/completions (must have stream: true).
 * @returns A stream of UTF-8 encoded completion text chunks.
 * @throws Error when the OpenAI API responds with a non-2xx status.
 */
export async function OpenAIStream(payload: OpenAIStreamPayload): Promise<ReadableStream<Uint8Array>> {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  // Counts emitted events so the leading "\n\n" prefix can be suppressed.
  let counter = 0;

  const res = await fetch("https://api.openai.com/v1/completions", {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.OPENAI_API_KEY ?? ""}`,
    },
    method: "POST",
    body: JSON.stringify(payload),
  });

  // The original skipped this check, so an API error (invalid key, rate
  // limit) surfaced later as a confusing SSE parse failure instead.
  if (!res.ok) {
    throw new Error(`OpenAI API error: ${res.status} ${res.statusText}`);
  }

  const stream = new ReadableStream({
    async start(controller) {
      // callback invoked once per complete SSE event
      function onParse(event: ParsedEvent | ReconnectInterval) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            const text = json.choices[0].text;
            if (counter < 2 && (text.match(/\n/) || []).length) {
              // this is a prefix character (i.e., "\n\n"), do nothing
              return;
            }
            controller.enqueue(encoder.encode(text));
            counter++;
          } catch (e) {
            // maybe parse error
            controller.error(e);
          }
        }
      }

      // stream response (SSE) from OpenAI may be fragmented into multiple chunks
      // this ensures we properly read chunks and invoke an event for each SSE event stream
      const parser = createParser(onParse);
      // https://web.dev/streams/#asynchronous-iteration
      for await (const chunk of res.body as any) {
        // stream: true keeps multi-byte characters split across chunk
        // boundaries intact (the original decode() could corrupt them).
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    },
  });

  return stream;
}

You can also find this project on my GitHub:
https://github.com/tzztson/chatgpt-next.js

Thank you for your attention.

--

--

tzztson
tzztson

Written by tzztson

Web Developer, Researcher of Trend Web & Blockchain, Veteran, Chess Player(Class A) https://github.com/tzztson

No responses yet