Skip to content

Commit

Permalink
Docs follow-ups for ai/prompts (vercel#346)
Browse files Browse the repository at this point in the history
  • Loading branch information
MaxLeiter authored Jul 18, 2023
1 parent 9320e95 commit 8929246
Show file tree
Hide file tree
Showing 6 changed files with 15 additions and 33 deletions.
4 changes: 2 additions & 2 deletions docs/pages/docs/api-reference.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -25,5 +25,5 @@ title: API Reference

## Prompt Construction Helpers

- [`buildOpenAssistantPrompt`](./api-reference/prompts#experimental_buildopenassistantprompt)
- [`buildStarChatBetaPrompt`](./api-reference/prompts#experimental_buildstarchatbetaprompt)
- [`experimental_buildOpenAssistantPrompt`](./api-reference/prompts#experimental_buildopenassistantprompt)
- [`experimental_buildStarChatBetaPrompt`](./api-reference/prompts#experimental_buildstarchatbetaprompt)
2 changes: 1 addition & 1 deletion docs/pages/docs/api-reference/prompts.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ const response = Hf.textGenerationStream({
Uses `<|user|>`, `<|end|>`, `<|system|>`, and `<|assistant|>` tokens. If a `Message` with an unsupported `role` is passed, an error will be thrown.

```ts filename="route.ts" {6}
import { buildStarChatBetaPrompt } from 'ai/prompts'
import { experimental_buildStarChatBetaPrompt } from 'ai/prompts'

const { messages } = await req.json()
const response = Hf.textGenerationStream({
Expand Down
22 changes: 2 additions & 20 deletions docs/pages/docs/guides/huggingface.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -42,40 +42,22 @@ For this example, we'll create a route handler at `app/api/chat/route.ts` that a
```tsx filename="app/api/chat/route.ts" showLineNumbers
import { HfInference } from '@huggingface/inference'
import { HuggingFaceStream, StreamingTextResponse } from 'ai'
import { experimental_buildOpenAssistantPrompt } from 'ai/prompts'

// Create a new Hugging Face Inference instance
const Hf = new HfInference(process.env.HUGGINGFACE_API_KEY)

// IMPORTANT! Set the runtime to edge
export const runtime = 'edge'

// Build a prompt from the messages
// Note: this is specific to the OpenAssistant model we're using
// @see https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5#prompting
function buildOpenAssistantPrompt(
messages: { content: string; role: 'system' | 'user' | 'assistant' }[]
) {
return (
messages
.map(({ content, role }) => {
if (role === 'user') {
return `<|prompter|>${content}<|endoftext|>`
} else {
return `<|assistant|>${content}<|endoftext|>`
}
})
.join('') + '<|assistant|>'
)
}

export async function POST(req: Request) {
// Extract the `messages` from the body of the request
const { messages } = await req.json()

// Initialize a text-generation stream using the Hugging Face Inference SDK
const response = await Hf.textGenerationStream({
model: 'OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5',
inputs: buildOpenAssistantPrompt(messages),
inputs: experimental_buildOpenAssistantPrompt(messages),
parameters: {
max_new_tokens: 200,
// @ts-ignore (this is a valid parameter specifically in OpenAssistant models)
Expand Down
4 changes: 2 additions & 2 deletions examples/next-huggingface/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { HfInference } from '@huggingface/inference'
import { HuggingFaceStream, StreamingTextResponse } from 'ai'
import { buildOpenAssistantPrompt } from 'ai/prompts'
import { experimental_buildOpenAssistantPrompt } from 'ai/prompts'

// Create a new HuggingFace Inference instance
const Hf = new HfInference(process.env.HUGGINGFACE_API_KEY)
Expand All @@ -14,7 +14,7 @@ export async function POST(req: Request) {

const response = Hf.textGenerationStream({
model: 'OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5',
inputs: buildOpenAssistantPrompt(messages),
inputs: experimental_buildOpenAssistantPrompt(messages),
parameters: {
max_new_tokens: 200,
// @ts-ignore (this is a valid parameter specifically in OpenAssistant models)
Expand Down
12 changes: 6 additions & 6 deletions packages/core/prompts/huggingface.test.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import {
buildOpenAssistantPrompt,
buildStarChatBetaPrompt
experimental_buildOpenAssistantPrompt,
experimental_buildStarChatBetaPrompt
} from './huggingface'
import type { Message } from '../shared/types'

Expand All @@ -13,15 +13,15 @@ describe('buildStarChatBetaPrompt', () => {
]

const expectedPrompt = `<|system|>\nYou are a chat bot.<|end|>\n<|user|>\nHello!<|end|>\n<|assistant|>\nHi there!<|end|>\n<|assistant|>`
const prompt = buildStarChatBetaPrompt(messages)
const prompt = experimental_buildStarChatBetaPrompt(messages)
expect(prompt).toEqual(expectedPrompt)
})

it('should throw an error if a function message is included', () => {
const messages: Pick<Message, 'content' | 'role'>[] = [
{ content: 'someFunction()', role: 'function' }
]
expect(() => buildStarChatBetaPrompt(messages)).toThrow()
expect(() => experimental_buildStarChatBetaPrompt(messages)).toThrow()
})
})

Expand All @@ -34,14 +34,14 @@ describe('buildOpenAssistantPrompt', () => {

const expectedPrompt =
'<|prompter|>Hello!<|endoftext|><|assistant|>Hi there!<|endoftext|><|assistant|>'
const prompt = buildOpenAssistantPrompt(messages)
const prompt = experimental_buildOpenAssistantPrompt(messages)
expect(prompt).toEqual(expectedPrompt)
})

it('should throw an error if a function message is included', () => {
const messages: Pick<Message, 'content' | 'role'>[] = [
{ content: 'someFunction()', role: 'function' }
]
expect(() => buildOpenAssistantPrompt(messages)).toThrow()
expect(() => experimental_buildOpenAssistantPrompt(messages)).toThrow()
})
})
4 changes: 2 additions & 2 deletions packages/core/prompts/huggingface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import { Message } from '../shared/types'
* Does not support `function` messages.
* @see https://huggingface.co/HuggingFaceH4/starchat-beta
*/
export function buildStarChatBetaPrompt(
export function experimental_buildStarChatBetaPrompt(
messages: Pick<Message, 'content' | 'role'>[]
) {
return (
Expand All @@ -30,7 +30,7 @@ export function buildStarChatBetaPrompt(
* Does not support `function` or `system` messages.
* @see https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5
*/
export function buildOpenAssistantPrompt(
export function experimental_buildOpenAssistantPrompt(
messages: Pick<Message, 'content' | 'role'>[]
) {
return (
Expand Down

0 comments on commit 8929246

Please sign in to comment.