Translating Node.js to Next.js

Node.js Code (Pages Router API route)

import type { NextApiRequest, NextApiResponse } from "next";
import OpenAI from "openai";

const openai = new OpenAI();

export default async function handler(
    req: NextApiRequest,
    res: NextApiResponse,
) {
    // Pages Router handlers receive every HTTP method, so reject anything but POST
    if (req.method !== "POST") {
        res.status(405).json({ error: "Method not allowed" });
        return;
    }

    try {
        const { content } = req.body;
        if (!content) {
            res.status(400).json({ error: "Content is required" });
            return;
        }

        const assistant = await openai.beta.assistants.create({
            name: "Math Tutor",
            instructions:
                "You are a personal math tutor. Write and run code to answer math questions.",
            tools: [{ type: "code_interpreter" }],
            model: "gpt-4o",
        });

        // Create a thread seeded with the user's message
        const thread = await openai.beta.threads.create({
            messages: [{ role: "user", content }],
        });

        const run = openai.beta.threads.runs.stream(thread.id, {
            assistant_id: assistant.id,
        });

        let responseText = "";

        run.on("textDelta", (textDelta) => {
            responseText += textDelta.value;
        })
            .on("end", () => {
                res.status(200).json({ response: responseText });
            })
            .on("error", (error) => {
                console.error("Error during streaming:", error);
                res.status(500).json({ error: "Internal server error" });
            });
    } catch (error) {
        console.error("Error:", error);
        res.status(500).json({ error: "Internal server error" });
    }
}
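
For reference, the two versions also live in different places in a Next.js project (the endpoint name math-tutor below is an illustrative assumption, not taken from the code above):

// Pages Router:  pages/api/math-tutor.ts      -> exports a default handler(req, res)
// App Router:    app/api/math-tutor/route.ts  -> exports an async function POST(req)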

Next.js Code (App Router route handler)

import { NextRequest, NextResponse } from "next/server";
import OpenAI from "openai";

const openai = new OpenAI();

export async function POST(req: NextRequest) {
    try {
        const { content } = await req.json();

        if (!content) {
            return NextResponse.json(
                { error: "User input is required" },
                { status: 400 },
            );
        }

        // Create an assistant
        const assistant = await openai.beta.assistants.create({
            name: "Math Tutor",
            instructions:
                "You are a personal math tutor. Write and run code to answer math questions.",
            tools: [{ type: "code_interpreter" }],
            model: "gpt-4o",
        });

        // Create a thread seeded with the user's message
        const thread = await openai.beta.threads.create({
            messages: [{ role: "user", content }],
        });

        // Stream the response back to the client using a ReadableStream
        const encoder = new TextEncoder();
        const readableStream = new ReadableStream({
            start(controller) {
                const run = openai.beta.threads.runs.stream(thread.id, {
                    assistant_id: assistant.id,
                });

                run.on("textDelta", (textDelta) => {
                    controller.enqueue(textDelta.value); // Stream chunks of data
                });

                run.on("end", () => {
                    controller.close();
                });

                run.on("error", (error) => {
                    console.error("Error during streaming:", error);
                    controller.error(error);
                });
            },
        });

        return new NextResponse(readableStream, {
            headers: { "Content-Type": "text/plain" }, // Set appropriate headers
        });
    } catch (error) {
        console.error("Error:", error);
        return NextResponse.json(
            { error: "Internal server error" },
            { status: 500 },
        );
    }
}
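
Because the App Router version streams plain text back, the client can read the answer as it arrives instead of waiting for a single JSON payload. A minimal browser-side sketch, assuming the route above is served at /api/math-tutor (the path is an assumption for illustration):

// Reads the streamed response chunk by chunk and returns the full text
async function askMathTutor(content: string): Promise<string> {
    const res = await fetch("/api/math-tutor", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ content }),
    });

    if (!res.ok || !res.body) {
        throw new Error(`Request failed with status ${res.status}`);
    }

    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let text = "";

    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        text += decoder.decode(value, { stream: true }); // append each decoded chunk
    }

    return text;
}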

What to look for when updating code like this for the Next.js App Router:

First, update the imports:

import { NextRequest, NextResponse } from "next/server";

Then:

  1. Replaced res with NextResponse (see the sketch after this list):

    • Use NextResponse.json for JSON responses.

    • For streaming, use a ReadableStream together with NextResponse.

  2. Parsed the body with req.json():

    • Replaced req.body with await req.json() to correctly handle the request payload.

  3. Handled streaming properly:

    • Used a ReadableStream to stream the response data back to the client.

    • Enqueued data chunks (encoded as bytes) as they are received from the run object.

  4. Improved error handling:

    • Added return statements after error responses (or returned the NextResponse directly) so the function exits cleanly.
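
A condensed before/after sketch of these points, with the OpenAI-specific logic stripped out. The file names and the echo-style handler bodies are illustrative placeholders, not part of the original example:

// Before: Pages Router API route (e.g. pages/api/example.ts)
import type { NextApiRequest, NextApiResponse } from "next";

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
    const { content } = req.body;                      // body is already parsed
    if (!content) {
        res.status(400).json({ error: "Content is required" });
        return;                                        // explicit return after responding
    }
    res.status(200).json({ received: content });
}

// After: App Router route handler (e.g. app/api/example/route.ts)
import { NextRequest, NextResponse } from "next/server";

export async function POST(req: NextRequest) {
    const { content } = await req.json();              // parse the body yourself
    if (!content) {
        return NextResponse.json({ error: "Content is required" }, { status: 400 });
    }
    return NextResponse.json({ received: content });   // every branch returns a Response
}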