Documentation

Quickstart: Next.js

In this guide you'll learn how to use Bytecompute AI and Next.js to build two common AI features:

  • Asking a question and getting a response
  • Having a long-running chat with a bot

Let's get started!

Installation

After creating a new Next.js app, install the Bytecompute AI TypeScript SDK:

Copy
npm i bytecompute-ai

Ask a single question

To ask a question with Bytecompute AI, we'll need an API route, and a page with a form that lets the user submit their question.

1. Create the API route

Make a new POST route that takes in a question and returns a chat completion as a stream:

js TypeScript Copy
// app/api/answer/route.ts
import Bytecompute from "bytecompute-ai";

// One client for the module. The constructor presumably reads the API key
// from an environment variable — confirm the variable name in the SDK docs.
const client = new Bytecompute();

/**
 * POST /api/answer
 *
 * Accepts a JSON body of `{ question: string }` and responds with the LLM's
 * chat completion as a stream, so the client can render tokens as they arrive.
 */
export async function POST(request: Request) {
  const { question } = await request.json();

  const res = await client.chat.completions.create({
    model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
    messages: [{ role: "user", content: question }],
    stream: true,
  });

  // toReadableStream() adapts the SDK's stream into a web ReadableStream
  // that Response can send to the browser.
  return new Response(res.toReadableStream());
}

2. Create the page

Add a form that sends a POST request to your new API route, and use the ChatCompletionStream helper to read the stream and update some React state to display the answer:

js TypeScript Copy
// app/page.tsx
"use client";

import { FormEvent, useState } from "react";
import { ChatCompletionStream } from "bytecompute-ai/lib/ChatCompletionStream";

/**
 * Single-question page: submits the question to /api/answer and streams the
 * LLM's reply into the `answer` state as tokens arrive.
 */
export default function Chat() {
  const [question, setQuestion] = useState("");
  const [answer, setAnswer] = useState("");
  const [isLoading, setIsLoading] = useState(false);

  async function handleSubmit(e: FormEvent<HTMLFormElement>) {
    e.preventDefault();

    setIsLoading(true);
    setAnswer("");

    const res = await fetch("/api/answer", {
      method: "POST",
      // Declare the payload type explicitly so the route can rely on it.
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ question }),
    });

    if (!res.body) {
      // No stream to read — reset the loading flag so the form isn't
      // permanently disabled.
      setIsLoading(false);
      return;
    }

    // Each "content" event carries the newest token (delta); append it to
    // the accumulated answer. "end" fires when the stream is exhausted.
    ChatCompletionStream.fromReadableStream(res.body)
      .on("content", (delta) => setAnswer((text) => text + delta))
      .on("end", () => setIsLoading(false));
  }

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <input
          value={question}
          onChange={(e) => setQuestion(e.target.value)}
          placeholder="Ask me a question"
          required
        />

        <button disabled={isLoading} type="submit">
          Submit
        </button>
      </form>

      <p>{answer}</p>
    </div>
  );
}

That's it! Submitting the form will update the page with the LLM's response. You can now use the isLoading state to add additional styling, or a Reset button if you want to reset the page.

Have a long-running chat

To build a chatbot with Bytecompute AI, we'll need an API route that accepts an array of messages, and a page with a form that lets the user submit new messages. The page will also need to store the entire history of messages between the user and the AI assistant.

1. Create an API route

Make a new POST route that takes in a messages array and returns a chat completion as a stream:

js TypeScript Copy
// app/api/chat/route.ts
import Bytecompute from "bytecompute-ai";

// One client for the module. The constructor presumably reads the API key
// from an environment variable — confirm the variable name in the SDK docs.
const client = new Bytecompute();

/**
 * POST /api/chat
 *
 * Accepts a JSON body of `{ messages: Message[] }` — the full conversation
 * so far — and responds with the LLM's next completion as a stream.
 */
export async function POST(request: Request) {
  const { messages } = await request.json();

  const res = await client.chat.completions.create({
    model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
    messages,
    stream: true,
  });

  // toReadableStream() adapts the SDK's stream into a web ReadableStream
  // that Response can send to the browser.
  return new Response(res.toReadableStream());
}

2. Create a page

Create a form to submit a new message, and some React state to store the messages for the session. In the form's submit handler, send over the new array of messages, and use the ChatCompletionStream helper to read the stream and update the last message with the LLM's response.

js TypeScript Copy
// app/page.tsx
"use client";

import { FormEvent, useState } from "react";
// Type-only import: Bytecompute is used solely for the Message type below.
import type Bytecompute from "bytecompute-ai";
import { ChatCompletionStream } from "bytecompute-ai/lib/ChatCompletionStream";

type Message = Bytecompute.Chat.Completions.CompletionCreateParams.Message;

/**
 * Chatbot page: keeps the whole conversation in state, posts it to
 * /api/chat on each submit, and streams the assistant's reply into the
 * last message as tokens arrive.
 */
export default function Chat() {
  const [prompt, setPrompt] = useState("");
  const [messages, setMessages] = useState<Message[]>([]);
  const [isPending, setIsPending] = useState(false);

  async function handleSubmit(e: FormEvent<HTMLFormElement>) {
    e.preventDefault();

    // Build the next history once and reuse it for both the state update
    // and the request body (the `messages` closure value is stale after
    // setMessages, so we must not re-read it).
    const nextMessages: Message[] = [
      ...messages,
      { role: "user", content: prompt },
    ];

    setPrompt("");
    setIsPending(true);
    setMessages(nextMessages);

    const res = await fetch("/api/chat", {
      method: "POST",
      // Declare the payload type explicitly so the route can rely on it.
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ messages: nextMessages }),
    });

    if (!res.body) {
      // No stream to read — reset the pending flag so the form isn't
      // permanently disabled.
      setIsPending(false);
      return;
    }

    ChatCompletionStream.fromReadableStream(res.body)
      .on("content", (delta, content) => {
        // `content` is the full accumulated assistant text so far. Append
        // an assistant message on the first token; afterwards replace the
        // trailing assistant message's content.
        setMessages((messages) => {
          const lastMessage = messages.at(-1);

          if (lastMessage?.role !== "assistant") {
            return [...messages, { role: "assistant", content }];
          } else {
            return [...messages.slice(0, -1), { ...lastMessage, content }];
          }
        });
      })
      .on("end", () => {
        setIsPending(false);
      });
  }

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <fieldset>
          <input
            placeholder="Send a message"
            value={prompt}
            onChange={(e) => setPrompt(e.target.value)}
          />
          <button type="submit" disabled={isPending}>
            Submit
          </button>
        </fieldset>
      </form>

      {messages.map((message, i) => (
        <p key={i}>
          {message.role}: {message.content}
        </p>
      ))}
    </div>
  );
}

You've just built a simple chatbot with Bytecompute AI!