Skip to content

Commit

Permalink
Merge pull request Nutlope#10 from smaeda-ks/smaeda-ks
Browse files Browse the repository at this point in the history
Move parsing logic to server-side
  • Loading branch information
Nutlope authored Jan 22, 2023
2 parents c9d6f71 + b7ed528 commit ab0fdb4
Show file tree
Hide file tree
Showing 4 changed files with 64 additions and 46 deletions.
16 changes: 15 additions & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
"@heroicons/react": "^2.0.13",
"@tailwindcss/forms": "^0.5.3",
"@vercel/analytics": "^0.1.8",
"eventsource-parser": "^0.0.5",
"framer-motion": "^8.4.3",
"next": "latest",
"react": "18.2.0",
Expand Down
49 changes: 46 additions & 3 deletions pages/api/generate.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import type { NextRequest } from "next/server";
import {
createParser,
ParsedEvent,
ReconnectInterval,
} from "eventsource-parser";

if (!process.env.OPENAI_API_KEY) {
throw new Error("Missing env var from OpenAI");
Expand All @@ -17,6 +22,9 @@ const handler = async (req: NextRequest): Promise<Response> => {
return new Response("No prompt in the request", { status: 400 });
}

const encoder = new TextEncoder();
const decoder = new TextDecoder();

const payload = {
model: "text-davinci-003",
prompt,
Expand All @@ -38,11 +46,46 @@ const handler = async (req: NextRequest): Promise<Response> => {
body: JSON.stringify(payload),
});

const data = res.body;
let counter = 0;
const stream = new ReadableStream({
async start(controller) {
// callback
function onParse(event: ParsedEvent | ReconnectInterval) {
  // Only SSE "event" records carry completion payloads; ignore reconnect hints.
  if (event.type !== "event") {
    return;
  }
  const payload = event.data;
  // OpenAI terminates the stream with a literal "[DONE]" sentinel.
  // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
  if (payload === "[DONE]") {
    controller.close();
    return;
  }
  try {
    const parsed = JSON.parse(payload);
    const text = parsed.choices[0].text;
    // Drop newline-only tokens (the "\n\n" prefix OpenAI emits before the
    // first real token) while fewer than two chunks have been forwarded.
    const isPrefixNewline = counter < 2 && (text.match(/\n/) || []).length;
    if (isPrefixNewline) {
      return;
    }
    controller.enqueue(encoder.encode(text));
    counter++;
  } catch (err) {
    // Payload was not valid JSON — surface the failure to the stream consumer.
    controller.error(err);
  }
}

return new Response(data, {
headers: { "Content-Type": "application/json; charset=utf-8" },
// stream response (SSE) from OpenAI may be fragmented into multiple chunks
// this ensures we properly read chunks and invoke an event for each SSE event stream
const parser = createParser(onParse);
// https://web.dev/streams/#asynchronous-iteration
for await (const chunk of res.body as any) {
parser.feed(decoder.decode(chunk));
}
},
});

return new Response(stream);
};

export default handler;
44 changes: 2 additions & 42 deletions pages/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -59,51 +59,11 @@ const Home: NextPage = () => {
const decoder = new TextDecoder();

let done = false;
let tempState = "";

while (!done) {
const { value, done: doneReading } = await reader.read();
done = doneReading;
const newValue = decoder
.decode(value)
.replaceAll("data: ", "")
.split("\n\n")
.filter(Boolean);

if (tempState) {
newValue[0] = tempState + newValue[0];
tempState = "";
}

newValue.forEach((newVal) => {
if (newVal === "[DONE]") {
return;
}

try {
const json = JSON.parse(newVal) as {
id: string;
object: string;
created: number;
choices?: {
text: string;
index: number;
logprobs: null;
finish_reason: null | string;
}[];
model: string;
};

if (!json.choices?.length) {
throw new Error("Something went wrong.");
}

const choice = json.choices[0];
setGeneratedBios((prev) => prev + choice.text);
} catch (error) {
tempState = newVal;
}
});
const chunkValue = decoder.decode(value);
setGeneratedBios((prev) => prev + chunkValue);
}

setLoading(false);
Expand Down

0 comments on commit ab0fdb4

Please sign in to comment.