diff --git a/docs/docs/contributing/index.md b/docs/docs/contributing/index.md index 23cccf4..d1a89eb 100644 --- a/docs/docs/contributing/index.md +++ b/docs/docs/contributing/index.md @@ -46,7 +46,7 @@ changeset publish ## Add a monorepo package to the main convostack package -All `@convostack/*` packages live in the `packages/` directory and follow the `convostack-package-name` folder naming +All `@convostack/*` packages live in the `packages/` directory and follow the `convostack-package-name` folder naming convention. To add a new package, reference one of the many existing packages. Just because a package has been defined in the `packages/` directory does NOT mean that it will automatically available @@ -55,14 +55,14 @@ in the main `convostack` NPM package. In order to add a package to the main `convostack` package, you must: 1. Add a new folder with the following naming convention to the `packages/convostack/src` folder. For example, for a - package named `@convostack/example-subpackage`, you would create the + package named `@convostack/example-subpackage`, you would create the directory `packages/convostack/src/example-subpackage` and a corresponding `index.ts` file `packages/convostack/src/example-subpackage/index.ts` that would serve to re-export your original package from within the parent `convostack` package. The `index.ts` file should only contain one - line: `export * from '@convostack/example-subpackage';` + line: `export * from '@convostack/example-subpackage';` 2. Add your package to the dependencies of `convostack`. Using the example from above, you would - add `"@convostack/example-subpackage": "*"` to `packages/convostack/package.json`'s `dependencies`. Please note that - for all of this to work for end users, you must publicly publish the `@convostack/example-subpackage` package to NPM, + add `"@convostack/example-subpackage": "*"` to `packages/convostack/package.json`'s `dependencies`. 
Please note that + for all of this to work for end users, you must publicly publish the `@convostack/example-subpackage` package to NPM, + since `convostack` does not actually bundle these dependencies internally. 3. To ensure that the entrypoints for imports are properly generated, you must also add your package to the `entrypoints` defined in the `packages/convostack/scripts/create-entrypoints.js` file. Using the example from diff --git a/docs/docs/getting-started/index.md b/docs/docs/getting-started/index.md new file mode 100644 index 0000000..08c0ce6 --- /dev/null +++ b/docs/docs/getting-started/index.md @@ -0,0 +1,188 @@ +--- +id: "index" +title: "Quickstart" +sidebar_label: "Quickstart" +sidebar_position: 0.1 +--- + +# Quickstart Guide + +This tutorial gives you a walkthrough on how to quickly connect AI agents to the ConvoStack chatbot playground. + +We will be using **Langchain** for creating the AI agents and **ConvoStack** for connecting these agents to a production-ready chatbot playground. + +![ConvoStack Playground](../../static/img/dev-playground.png) + +## Installation + +```bash +npm install convostack langchain +``` + +## Example 1: OpenAI Agent + +In this example, we are connecting an OpenAI [LLM](https://js.langchain.com/docs/modules/models/llms/) to the chatbot playground. + +```typescript +import { playground } from "convostack/playground"; +import { OpenAI } from "langchain/llms/openai"; + +playground({ + async reply(context: IAgentContext): Promise<IAgentResponse> { + // `humanMessage` is the content of each message the user sends via the chatbot playground. + let humanMessage = context.getHumanMessage().content; + // `agent` is the OpenAI agent we want to use to respond to each `humanMessage` + const agent = new OpenAI(); + // `call` is a simple string-in, string-out method for interacting with the OpenAI agent. 
+ const resp = await agent.call(humanMessage); + // `resp` is the generated agent's response to the user's `humanMessage` + return { + content: resp, + contentType: "markdown", + }; + }, +}); +``` + +**See the code above in action:** + +![ConvoStack Quickstart Example 1](../../static/img/ex1.png) + +## Example 2: LLM Chain + +In this example, we are constructing an [LLMChain](https://js.langchain.com/docs/modules/chains/llm_chain) which takes a human message from the chatbot playground, formats it with a [PromptTemplate](https://js.langchain.com/docs/modules/prompts/prompt_templates/), and then passes the formatted response to an OpenAI agent. + +The generated response of the agent will be streamed to the user via the chatbot playground. + +```typescript +import { playground } from "convostack/playground"; +import { + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, +} from "langchain/prompts"; +import { LLMChain } from "langchain/chains"; +import { ChatOpenAI } from "langchain/chat_models/openai"; + +playground({ + async reply(context: IAgentContext): Promise<IAgentResponse> { + // `humanMessage` is the content of each message the user sends via the chatbot playground. + let humanMessage = context.getHumanMessage().content; + // We can now construct an LLMChain from a ChatPromptTemplate and a chat model. + const chat = new ChatOpenAI({ streaming: true, temperature: 0 }); + // Pre-prompt the agent to be a language translator + const chatPrompt = ChatPromptTemplate.fromPromptMessages([ + SystemMessagePromptTemplate.fromTemplate( + "You are a helpful assistant that translates {input_language} to {output_language}." + ), + HumanMessagePromptTemplate.fromTemplate("{text}"), + ]); + const chain = new LLMChain({ + prompt: chatPrompt, + llm: chat, + }); + + // `resp` is the response of the OpenAI LLM chain translating `humanMessage` from English to French. 
+ const resp = await chain.call({ + input_language: "English", + output_language: "French", + text: humanMessage, + }); + + return { + content: resp.text, + contentType: "markdown", + }; + }, +}); +``` + +**See the code above in action:** + +![ConvoStack Quickstart Example 2](../../static/img/ex2.png) + +## Example 3: LLM Chain With History + +In this example, we are connecting an OpenAI [LLM](https://js.langchain.com/docs/modules/models/llms/) that remembers the previous conversational back and forths directly using [Buffer Memory](https://js.langchain.com/docs/modules/memory/examples/buffer_memory) and `ConvoStackLangchainChatMessageHistory`. + +The generated response of the agent will be streamed to the user via the chatbot playground. + +```typescript +import { playground } from "convostack/playground"; +import { ConvoStackLangchainChatMessageHistory } from "convostack/langchain-memory"; +import { ChatOpenAI } from "langchain/chat_models/openai"; +import { + SystemMessagePromptTemplate, + HumanMessagePromptTemplate, + ChatPromptTemplate, + MessagesPlaceholder, +} from "langchain/prompts"; +import { ConversationChain } from "langchain/chains"; +import { BufferMemory } from "langchain/memory"; + +playground({ + async reply( + context: IAgentContext, + callbacks?: IAgentCallbacks + ): Promise<IAgentResponse> { + // `humanMessage` is the content of each message the user sends via the chatbot playground. 
+ let humanMessage = context.getHumanMessage().content; + + // Create a new OpenAI agent, with streaming + const chat = new ChatOpenAI({ + modelName: "gpt-3.5-turbo", + temperature: 0, + streaming: true, + callbacks: [ + { + handleLLMNewToken(token: string) { + // Stream tokens to ConvoStack + callbacks?.onMessagePart({ + contentChunk: token, + }); + }, + }, + ], + }); + + // Setup your prompts (note the placeholder for {history}) + const chatPrompt = ChatPromptTemplate.fromPromptMessages([ + SystemMessagePromptTemplate.fromTemplate( + "The following is a friendly conversation between a human and an AI." + ), + new MessagesPlaceholder("history"), + HumanMessagePromptTemplate.fromTemplate("{input}"), + ]); + + // Setup the chain with a BufferMemory that pulls from the ConvoStack conversation history + const chain = new ConversationChain({ + memory: new BufferMemory({ + // Use the ConvoStackLangchainChatMessageHistory class to prepare a Langchain-compatible version of the history + chatHistory: new ConvoStackLangchainChatMessageHistory({ + // Pass the current conversation's message history for loading + history: context.getHistory(), + }), + returnMessages: true, + memoryKey: "history", + }), + prompt: chatPrompt, + llm: chat, + }); + + // `resp` is the response of the OpenAI LLM chain to `humanMessage`, which was inputted on the ConvoStack playground. 
+ const resp = await chain.call({ + input: context.getHumanMessage().content, + }); + + // Send the final response to ConvoStack + return { + content: resp.response, + contentType: "markdown", + }; + }, +}); +``` + +**See the code above in action:** + +![ConvoStack Quickstart Example 3](../../static/img/ex3.png) diff --git a/docs/docs/getting-started/quickstart-react-express-playground.md b/docs/docs/getting-started/quickstart-react-express-playground.md index c7b107e..848c16b 100644 --- a/docs/docs/getting-started/quickstart-react-express-playground.md +++ b/docs/docs/getting-started/quickstart-react-express-playground.md @@ -1,6 +1,8 @@ -# Quickstart Guide +--- +sidebar_position: 0.2 +--- -The ConvoStack Playground monorepo is the fastest way to start exploring ConvoStack. If you like learning by example, then this is the quickstart for you! +# Playground Repository To check out the playground without setting it up yourself, click [here](https://playground.convostack.ai) for a live demo! 
diff --git a/docs/sidebars.js b/docs/sidebars.js index f2b9150..3b562f1 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -13,79 +13,79 @@ /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ const sidebars = { - // By default, Docusaurus generates a sidebar from the docs folder structure - tutorialSidebar: [ - "README", - "the-basics", - { - type: "category", - label: "Getting Started", - link: { - type: "doc", - id: "getting-started/quickstart-react-express-playground", - }, - items: [{ type: "autogenerated", dirName: "getting-started" }], - }, - { - type: "category", - label: "Frontend", - link: { - type: "doc", - id: "frontend/index", - }, - items: [{ type: "autogenerated", dirName: "frontend" }], - }, - { - type: "category", - label: "Backend", - link: { - type: "doc", - id: "backend/index", - }, - items: [{ type: "autogenerated", dirName: "backend" }], - }, - { - type: "category", - label: "Production", - link: { - type: "doc", - id: "production/index", - }, - items: [{ type: "autogenerated", dirName: "production" }], - }, - { - type: "category", - label: "TS/JS API Reference", - link: { - type: "doc", - id: "ts-js-api/index", - }, - items: [{ type: "autogenerated", dirName: "ts-js-api" }], - }, - { - type: "category", - label: "GraphQL API Examples", - items: [{ type: "autogenerated", dirName: "graphql-api-examples" }], - }, - { - type: "category", - label: "GraphQL API Reference", - link: { - type: "doc", - id: "graphql-api/index", - }, - items: [{ type: "autogenerated", dirName: "graphql-api" }], - }, - { - type: "category", - label: "Contributing", - link: { - type: "doc", - id: "contributing/index", - }, - items: [{ type: "autogenerated", dirName: "contributing" }], - }, - ], + // By default, Docusaurus generates a sidebar from the docs folder structure + tutorialSidebar: [ + "README", + "the-basics", + { + type: "category", + label: "Getting Started", + link: { + type: "doc", + id: "getting-started/index", + }, + items: [{ type: 
"autogenerated", dirName: "getting-started" }], + }, + { + type: "category", + label: "Frontend", + link: { + type: "doc", + id: "frontend/index", + }, + items: [{ type: "autogenerated", dirName: "frontend" }], + }, + { + type: "category", + label: "Backend", + link: { + type: "doc", + id: "backend/index", + }, + items: [{ type: "autogenerated", dirName: "backend" }], + }, + { + type: "category", + label: "Production", + link: { + type: "doc", + id: "production/index", + }, + items: [{ type: "autogenerated", dirName: "production" }], + }, + { + type: "category", + label: "TS/JS API Reference", + link: { + type: "doc", + id: "ts-js-api/index", + }, + items: [{ type: "autogenerated", dirName: "ts-js-api" }], + }, + { + type: "category", + label: "GraphQL API Examples", + items: [{ type: "autogenerated", dirName: "graphql-api-examples" }], + }, + { + type: "category", + label: "GraphQL API Reference", + link: { + type: "doc", + id: "graphql-api/index", + }, + items: [{ type: "autogenerated", dirName: "graphql-api" }], + }, + { + type: "category", + label: "Contributing", + link: { + type: "doc", + id: "contributing/index", + }, + items: [{ type: "autogenerated", dirName: "contributing" }], + }, + ], }; module.exports = sidebars; diff --git a/docs/static/img/dev-playground.png b/docs/static/img/dev-playground.png new file mode 100644 index 0000000..f096409 Binary files /dev/null and b/docs/static/img/dev-playground.png differ diff --git a/docs/static/img/ex1.png b/docs/static/img/ex1.png new file mode 100644 index 0000000..7096d1b Binary files /dev/null and b/docs/static/img/ex1.png differ diff --git a/docs/static/img/ex2.png b/docs/static/img/ex2.png new file mode 100644 index 0000000..3bd8700 Binary files /dev/null and b/docs/static/img/ex2.png differ diff --git a/docs/static/img/ex3.png b/docs/static/img/ex3.png new file mode 100644 index 0000000..391c387 Binary files /dev/null and b/docs/static/img/ex3.png differ diff --git 
a/examples/be-example-express-sqlite/CHANGELOG.md b/examples/be-example-express-sqlite/CHANGELOG.md index 9fa1780..8221b33 100644 --- a/examples/be-example-express-sqlite/CHANGELOG.md +++ b/examples/be-example-express-sqlite/CHANGELOG.md @@ -391,7 +391,7 @@ - Update package metadata - Updated dependencies - convostack@0.0.4 - - @convostack/agent@0.0.4 + - @convostack/agent@0.0.4 ## 0.0.1 @@ -400,4 +400,4 @@ - Update build and packaging strategy - Updated dependencies - convostack@0.0.3 - - @convostack/agent@0.0.3 + - @convostack/agent@0.0.3 diff --git a/examples/be-example-express-sqlite/src/agent-playground.ts b/examples/be-example-express-sqlite/src/agent-playground.ts index 4f08c18..c483d34 100644 --- a/examples/be-example-express-sqlite/src/agent-playground.ts +++ b/examples/be-example-express-sqlite/src/agent-playground.ts @@ -1,4 +1,4 @@ -import {playground} from "@convostack/playground"; -import {AgentEcho} from "@convostack/agent-echo"; +import { playground } from "convostack/playground"; +import { AgentEcho } from "convostack/agent-echo"; -playground(new AgentEcho()) \ No newline at end of file +playground(new AgentEcho());