feat: add stream support

Yidadaa
2023-03-11 20:54:24 +08:00
parent 4f10b9a60f
commit 74dd6194d8
8 changed files with 570 additions and 23 deletions

View File

@@ -0,0 +1,64 @@
import type { ChatRequest } from "../chat/typing";
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
const apiKey = process.env.OPENAI_API_KEY;
async function createStream(payload: ChatRequest) {
const encoder = new TextEncoder();
const decoder = new TextDecoder();
console.log("[ChatStream]", payload);
const res = await fetch("https://api.openai.com/v1/chat/completions", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
method: "POST",
body: JSON.stringify(payload),
});
const stream = new ReadableStream({
async start(controller) {
function onParse(event: any) {
if (event.type === "event") {
const data = event.data;
// https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
if (data === "[DONE]") {
controller.close();
return;
}
try {
const json = JSON.parse(data);
const text = json.choices[0].delta.content;
const queue = encoder.encode(text);
controller.enqueue(queue);
} catch (e) {
controller.error(e);
}
}
}
const parser = createParser(onParse);
for await (const chunk of res.body as any) {
// decode with { stream: true } so multi-byte characters split across chunks are not mangled
parser.feed(decoder.decode(chunk, { stream: true }));
}
},
});
return stream;
}
export async function POST(req: NextRequest) {
try {
const body = (await req.json()) as ChatRequest;
const stream = await createStream(body);
return new Response(stream);
} catch (error) {
console.error(error);
// return an explicit error response instead of falling through with undefined
return new Response("Internal Server Error", { status: 500 });
}
}
export const config = {
runtime: "edge",
};
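The route above proxies OpenAI's server-sent events and re-emits only the text deltas. As a rough illustration of that parsing step (not part of the commit; the sample payloads are made up), feeding SSE-formatted text into createParser produces the same delta.content fragments that createStream enqueues:

import { createParser } from "eventsource-parser";

// Hypothetical chunks in the shape the OpenAI streaming API returns.
const sampleSSE =
  'data: {"choices":[{"delta":{"content":"Hel"}}]}\n\n' +
  'data: {"choices":[{"delta":{"content":"lo"}}]}\n\n' +
  "data: [DONE]\n\n";

let text = "";
const parser = createParser((event) => {
  if (event.type === "event" && event.data !== "[DONE]") {
    text += JSON.parse(event.data).choices[0].delta.content ?? "";
  }
});
parser.feed(sampleSSE);
console.log(text); // "Hello"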

View File

@@ -2,8 +2,7 @@ import { OpenAIApi, Configuration } from "openai";
import { ChatRequest } from "./typing";
const isProd = process.env.NODE_ENV === "production";
let apiKey = process.env.OPENAI_API_KEY;
const apiKey = process.env.OPENAI_API_KEY;
const openai = new OpenAIApi(
new Configuration({

View File

@@ -1,7 +1,10 @@
"use client";
import { useState, useRef, useLayoutEffect, useEffect } from "react";
import { useState, useRef, useEffect } from "react";
import ReactMarkdown from "react-markdown";
import "katex/dist/katex.min.css";
import RemarkMath from "remark-math";
import RehypeKatex from "rehype-katex";
import { IconButton } from "./button";
import styles from "./home.module.css";
@@ -19,6 +22,14 @@ import LoadingIcon from "../icons/three-dots.svg";
import { Message, useChatStore } from "../store";
export function Markdown(props: { content: string }) {
return (
<ReactMarkdown remarkPlugins={[RemarkMath]} rehypePlugins={[RehypeKatex]}>
{props.content}
</ReactMarkdown>
);
}
export function Avatar(props: { role: Message["role"] }) {
if (props.role === "assistant") {
return <BotIcon className={styles["user-avtar"]} />;
@@ -174,11 +185,12 @@ export function Chat() {
<div className={styles["chat-message-status"]}></div>
)}
<div className={styles["chat-message-item"]}>
{message.preview && !isUser ? (
{(message.preview || message.content.length === 0) &&
!isUser ? (
<LoadingIcon />
) : (
<div className="markdown-body">
<ReactMarkdown>{message.content}</ReactMarkdown>
<Markdown content={message.content} />
</div>
)}
</div>
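The new Markdown wrapper above routes message content through remark-math and rehype-katex, so TeX inside a reply is typeset instead of shown verbatim. A hypothetical usage, not from this commit:

function MathPreview() {
  // Inline TeX in the message body is rendered by KaTeX.
  return <Markdown content={"Euler's identity: $e^{i\\pi} + 1 = 0$"} />;
}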

View File

@@ -1,8 +1,8 @@
import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import { Message } from "./store";
export async function requestChat(messages: Message[]) {
const req: ChatRequest = {
const makeRequestParam = (messages: Message[], stream = false): ChatRequest => {
return {
model: "gpt-3.5-turbo",
messages: messages
.map((v) => ({
@@ -10,7 +10,12 @@ export async function requestChat(messages: Message[]) {
content: v.content,
}))
.filter((m) => m.role !== "assistant"),
stream,
};
};
export async function requestChat(messages: Message[]) {
const req: ChatRequest = makeRequestParam(messages);
const res = await fetch("/api/chat", {
method: "POST",
@@ -23,6 +28,45 @@ export async function requestChat(messages: Message[]) {
return (await res.json()) as ChatReponse;
}
export async function requestChatStream(
messages: Message[],
options?: {
onMessage: (message: string, done: boolean) => void;
}
) {
const req = makeRequestParam(messages, true);
const res = await fetch("/api/chat-stream", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(req),
});
let responseText = "";
if (res.ok) {
const reader = res.body?.getReader();
const decoder = new TextDecoder();
while (true) {
const content = await reader?.read();
// stream: true avoids breaking multi-byte characters at chunk boundaries
const text = decoder.decode(content?.value, { stream: true });
responseText += text;
const done = !content || content.done;
options?.onMessage(responseText, false);
if (done) {
break;
}
}
options?.onMessage(responseText, true);
}
}
export async function requestWithPrompt(messages: Message[], prompt: string) {
messages = messages.concat([
{
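requestChatStream above posts the same request body with stream: true and reads the response incrementally, calling onMessage with the text accumulated so far and a done flag once the reader is drained. A minimal, hypothetical caller (the real consumer is the chat store in the next file):

// Illustrative only: stream a one-off prompt and log the growing reply.
async function demoStream() {
  await requestChatStream(
    [{ role: "user", content: "Hello", date: new Date().toLocaleString() }],
    {
      onMessage(message, done) {
        console.log(done ? "[done]" : "[partial]", message);
      },
    }
  );
}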

View File

@@ -2,11 +2,12 @@ import { create } from "zustand";
import { persist } from "zustand/middleware";
import { type ChatCompletionResponseMessage } from "openai";
import { requestChat, requestWithPrompt } from "./requests";
import { requestChat, requestChatStream, requestWithPrompt } from "./requests";
import { trimTopic } from "./utils";
export type Message = ChatCompletionResponseMessage & {
date: string;
streaming?: boolean;
};
interface ChatConfig {
@@ -65,6 +66,11 @@ interface ChatStore {
summarizeSession: () => void;
updateStat: (message: Message) => void;
updateCurrentSession: (updater: (session: ChatSession) => void) => void;
updateMessage: (
sessionIndex: number,
messageIndex: number,
updater: (message?: Message) => void
) => void;
}
export const useChatStore = create<ChatStore>()(
@@ -141,12 +147,42 @@ export const useChatStore = create<ChatStore>()(
const messages = get().currentSession().messages.concat(message);
get().onNewMessage(message);
const res = await requestChat(messages);
get().onNewMessage({
...res.choices[0].message!,
const botMessage: Message = {
content: "",
role: "assistant",
date: new Date().toLocaleString(),
streaming: true,
};
get().updateCurrentSession((session) => {
session.messages.push(botMessage);
});
requestChatStream(messages, {
onMessage(content, done) {
if (done) {
// clear the streaming flag only after the stream has finished
botMessage.streaming = false;
get().updateStat(message);
get().summarizeSession();
} else {
botMessage.content = content;
set(() => ({}));
}
},
});
},
updateMessage(
sessionIndex: number,
messageIndex: number,
updater: (message?: Message) => void
) {
const sessions = get().sessions;
const session = sessions.at(sessionIndex);
const messages = session?.messages;
console.log(sessions, messages?.length, messages?.at(messageIndex));
updater(messages?.at(messageIndex));
set(() => ({ sessions }));
},
onBotResponse(message) {