Merge branch 'main' into pr/62
steven-tey committed Jul 7, 2023
2 parents afd5a78 + 45f5a4e commit 3aa3579
Showing 7 changed files with 47 additions and 25 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -46,7 +46,7 @@ You can deploy your own version of Novel to Vercel with one click:
To set up Novel locally, you'll need to clone the repository and set up the following environment variables:

- `OPENAI_API_KEY` – your OpenAI API key (you can get one [here](https://platform.openai.com/account/api-keys))
- `BLOB_READ_WRITE_TOKEN` – your Vercel Blob read/write token (you can get one [here](https://vercel.com/docs/storage/vercel-blob/quickstart#quickstart))
- `BLOB_READ_WRITE_TOKEN` – your Vercel Blob read/write token (currently [still in beta](https://vercel.com/docs/storage/vercel-blob/quickstart#quickstart), but feel free to [DM me on Twitter](https://twitter.com/steventey) for access)

If you've deployed this to Vercel, you can also use [`vc env pull`](https://vercel.com/docs/cli/env#exporting-development-environment-variables) to pull the environment variables from your Vercel project.
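
For reference, a minimal `.env.local` covering the two variables above might look like the sketch below (the values are placeholders, not real tokens):

OPENAI_API_KEY=<your OpenAI API key>
BLOB_READ_WRITE_TOKEN=<your Vercel Blob read/write token>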

8 changes: 2 additions & 6 deletions app/api/generate/route.ts
@@ -38,11 +38,7 @@ export async function POST(req: Request): Promise<Response> {
}
}

let { prompt: content } = await req.json();

// remove trailing slash,
// slice the content from the end to prioritize later characters
content = content.replace(/\/$/, "").slice(-5000);
let { prompt } = await req.json();

const response = await openai.createChatCompletion({
model: "gpt-3.5-turbo",
@@ -58,7 +54,7 @@
},
{
role: "user",
content,
content: prompt,
},
],
temperature: 0.7,
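
With this change the route forwards the client-supplied prompt as-is. A minimal sketch of calling it, assuming the app is running locally and reading the streamed response in one go for brevity:

// Sketch only: POST a prompt to the route above and read the completion.
// The endpoint path follows from the file location (app/api/generate/route.ts).
const res = await fetch("/api/generate", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ prompt: "Once upon a time" }),
});
console.log(await res.text());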
21 changes: 21 additions & 0 deletions lib/editor.ts
@@ -1,4 +1,5 @@
import { EditorView } from "@tiptap/pm/view";
import { Editor } from "@tiptap/core";
import { BlobResult } from "@vercel/blob";
import { toast } from "sonner";

@@ -115,3 +116,23 @@ export const handleImageUpload = (
}
};
};

export const getPrevText = (
editor: Editor,
{
chars,
offset = 0,
}: {
chars: number;
offset?: number;
},
) => {
// for now, we're using textBetween until we can figure out a way to stream markdown text
// with proper formatting: https://github.com/steven-tey/novel/discussions/7
return editor.state.doc.textBetween(
Math.max(0, editor.state.selection.from - chars),
editor.state.selection.from - offset,
"\n",
);
// complete(editor.storage.markdown.getMarkdown());
};
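
As a rough illustration of the `chars`/`offset` semantics above, here is a plain-string analogue (a hypothetical helper, not part of the commit; the real function works on the ProseMirror document via `textBetween`):

// Take up to `chars` characters before the cursor, then trim `offset`
// characters off the end of that window.
const getPrevChars = (
  text: string,
  cursor: number,
  chars: number,
  offset = 0,
) => text.slice(Math.max(0, cursor - chars), cursor - offset);

// getPrevChars("Hello world/", 12, 5000, 1) === "Hello world"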
2 changes: 1 addition & 1 deletion package.json
@@ -34,7 +34,7 @@
"@vercel/analytics": "^1.0.1",
"@vercel/blob": "^0.9.2",
"@vercel/kv": "^0.2.1",
"ai": "^2.1.3",
"ai": "^2.1.15",
"clsx": "^1.2.1",
"eslint": "8.36.0",
"eslint-config-next": "13.2.4",
12 changes: 6 additions & 6 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

11 changes: 7 additions & 4 deletions ui/editor/extensions/slash-command.tsx
@@ -28,7 +28,7 @@ import LoadingCircle from "@/ui/icons/loading-circle";
import { toast } from "sonner";
import va from "@vercel/analytics";
import Magic from "@/ui/icons/magic";
import { handleImageUpload } from "@/lib/editor";
import { getPrevText, handleImageUpload } from "@/lib/editor";

interface CommandItemProps {
title: string;
@@ -285,9 +285,12 @@ const CommandList = ({
});
if (item) {
if (item.title === "Continue writing") {
// we're using this for now until we can figure out a way to stream markdown text with proper formatting: https://github.com/steven-tey/novel/discussions/7
complete(editor.getText());
// complete(editor.storage.markdown.getMarkdown());
complete(
getPrevText(editor, {
chars: 5000,
offset: 1,
}),
);
} else {
command(item);
}
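
A hedged reading of the parameters: when "Continue writing" is chosen, the document still ends in the "/" that opened the menu, so `offset: 1` appears to drop that character and `chars: 5000` caps the prompt length, roughly mirroring the `content.replace(/\/$/, "").slice(-5000)` line removed from the API route. A plain-string illustration:

// Illustrative only, on a plain string rather than the editor document:
const withSlash = "Once upon a time/";
const prompt = withSlash.slice(-5000).slice(0, -1); // -> "Once upon a time"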
16 changes: 9 additions & 7 deletions ui/editor/index.tsx
@@ -11,6 +11,7 @@ import { toast } from "sonner";
import va from "@vercel/analytics";
import DEFAULT_EDITOR_CONTENT from "./default-content";
import { EditorBubbleMenu } from "./components";
import { getPrevText } from "@/lib/editor";

export default function Editor() {
const [content, setContent] = useLocalStorage(
@@ -37,18 +38,19 @@ onUpdate: (e) => {
onUpdate: (e) => {
setSaveStatus("Unsaved");
const selection = e.editor.state.selection;
const lastTwo = e.editor.state.doc.textBetween(
selection.from - 2,
selection.from,
"\n",
);
const lastTwo = getPrevText(e.editor, {
chars: 2,
});
if (lastTwo === "++" && !isLoading) {
e.editor.commands.deleteRange({
from: selection.from - 2,
to: selection.from,
});
// we're using this for now until we can figure out a way to stream markdown text with proper formatting: https://github.com/steven-tey/novel/discussions/7
complete(e.editor.getText());
complete(
getPrevText(e.editor, {
chars: 5000,
}),
);
// complete(e.editor.storage.markdown.getMarkdown());
va.track("Autocomplete Shortcut Used");
} else {
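
The hunk above wires a "++" shortcut: when the last two characters are "++", they are deleted and up to the previous 5,000 characters are sent to the completion endpoint. A minimal plain-string sketch of that flow (a hypothetical helper, not the editor code itself):

// Detect the "++" trigger, strip it, and hand the preceding text to `complete`.
const maybeAutocomplete = (
  text: string,
  complete: (prompt: string) => void,
) => {
  if (text.endsWith("++")) {
    // drop the trigger, then send at most the last 5,000 characters as the prompt
    complete(text.slice(0, -2).slice(-5000));
  }
};

// maybeAutocomplete("The quick brown fox++", console.log); // logs "The quick brown fox"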
