diff --git a/pages/api/lambda.ts b/pages/api/lambda.ts
new file mode 100644
index 00000000..335d0fac
--- /dev/null
+++ b/pages/api/lambda.ts
@@ -0,0 +1,39 @@
+import { NextApiRequest, NextApiResponse } from "next";
+
+if (!process.env.OPENAI_API_KEY) {
+  throw new Error("Missing env var from OpenAI");
+}
+
+const handler = async (req: NextApiRequest, res: NextApiResponse) => {
+  const prompt = req.body.prompt;
+
+  if (!prompt) {
+    return res.status(400).send("No prompt in the request");
+  }
+
+  const payload = {
+    model: "text-davinci-003",
+    prompt,
+    temperature: 0.7,
+    top_p: 1,
+    frequency_penalty: 0,
+    presence_penalty: 0,
+    max_tokens: 1000,
+    n: 1,
+  };
+
+  const response = await fetch("https://api.openai.com/v1/completions", {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${process.env.OPENAI_API_KEY ?? ""}`,
+    },
+    method: "POST",
+    body: JSON.stringify(payload),
+  });
+
+  const data = await response.json();
+  console.log("data", data);
+  return res.status(200).json(data);
+};
+
+export default handler;