Nc/llmchain functions (langchain-ai#1699)
* Refactor openai functions chains to use output parsers

* Lint

* Rename

* Add docs
nfcampos authored Jun 19, 2023
1 parent 46f8147 commit 29015fb
Showing 25 changed files with 298 additions and 152 deletions.
24 changes: 24 additions & 0 deletions docs/docs/modules/chains/openai_functions/index.mdx
@@ -0,0 +1,24 @@
---
hide_table_of_contents: true
sidebar_position: 4
---

import CodeBlock from "@theme/CodeBlock";
import Extraction from "@examples/chains/openai_functions_extraction.ts";
import Tagging from "@examples/chains/openai_functions_tagging.ts";

# OpenAI Functions Chains

These chains are designed to be used with the [OpenAI Functions](https://platform.openai.com/docs/guides/gpt/function-calling) API.

## Extraction

This chain is designed to extract a list of objects from an input text, based on a schema describing the desired information.

<CodeBlock language="typescript">{Extraction}</CodeBlock>

## Tagging

This chain is designed to tag an input text according to properties defined in a schema.

<CodeBlock language="typescript">{Tagging}</CodeBlock>
37 changes: 37 additions & 0 deletions examples/src/chains/openai_functions_extraction.ts
@@ -0,0 +1,37 @@
import { z } from "zod";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { createExtractionChainFromZod } from "langchain/chains";

const chain = createExtractionChainFromZod(
z.object({
"person-name": z.string().optional(),
"person-age": z.number().optional(),
"person-hair_color": z.string().optional(),
"dog-name": z.string().optional(),
"dog-breed": z.string().optional(),
}),
new ChatOpenAI({ modelName: "gpt-3.5-turbo-0613", temperature: 0 })
);

console.log(
await chain.run(`Alex is 5 feet tall. Claudia is 4 feet taller than Alex and jumps higher than him. Claudia is a brunette and Alex is blonde.
Alex's dog Frosty is a labrador and likes to play hide and seek.`)
);
/*
[
{
'person-name': 'Alex',
'person-age': 0,
'person-hair_color': 'blonde',
'dog-name': 'Frosty',
'dog-breed': 'labrador'
},
{
'person-name': 'Claudia',
'person-age': 0,
'person-hair_color': 'brunette',
'dog-name': '',
'dog-breed': ''
}
]
*/
24 changes: 24 additions & 0 deletions examples/src/chains/openai_functions_tagging.ts
@@ -0,0 +1,24 @@
import { createTaggingChain } from "langchain/chains";
import { ChatOpenAI } from "langchain/chat_models/openai";

const chain = createTaggingChain(
{
type: "object",
properties: {
sentiment: { type: "string" },
tone: { type: "string" },
language: { type: "string" },
},
required: ["tone"],
},
new ChatOpenAI({ modelName: "gpt-4-0613", temperature: 0 })
);

console.log(
await chain.run(
`Estoy increiblemente contento de haberte conocido! Creo que seremos muy buenos amigos!`
)
);
/*
{ tone: 'positive', language: 'Spanish' }
*/
12 changes: 7 additions & 5 deletions examples/src/index.ts
@@ -1,3 +1,4 @@
import { awaitAllCallbacks } from "langchain/callbacks";
import path from "path";
import url from "url";

@@ -44,10 +45,11 @@ if (runExample) {
const maybePromise = runExample(args);

if (maybePromise instanceof Promise) {
maybePromise.catch((e) => {
console.error(`Example failed with:`);
console.error(e);
process.exit(1);
});
maybePromise
.catch((e) => {
console.error(`Example failed with:`);
console.error(e);
})
.finally(() => awaitAllCallbacks());
}
}
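With this change, a failing example no longer exits the process immediately; it logs the error and then waits for pending callback handlers to flush. A standalone sketch of the same pattern (the `main` wrapper is illustrative; `awaitAllCallbacks` is the export imported in the diff above):

```typescript
import { awaitAllCallbacks } from "langchain/callbacks";

// Illustrative entrypoint: log failures instead of exiting immediately,
// then let any background callback handlers (e.g. tracers) finish flushing.
async function main() {
  // ... run an example here ...
}

main()
  .catch((e) => {
    console.error("Example failed with:");
    console.error(e);
  })
  .finally(() => awaitAllCallbacks());
```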
3 changes: 3 additions & 0 deletions langchain/.gitignore
@@ -394,6 +394,9 @@ experimental/plan_and_execute.d.ts
client.cjs
client.js
client.d.ts
evaluation.cjs
evaluation.js
evaluation.d.ts
index.cjs
index.js
index.d.ts
8 changes: 8 additions & 0 deletions langchain/package.json
@@ -406,6 +406,9 @@
"client.cjs",
"client.js",
"client.d.ts",
"evaluation.cjs",
"evaluation.js",
"evaluation.d.ts",
"index.cjs",
"index.js",
"index.d.ts"
@@ -1426,6 +1429,11 @@
"import": "./client.js",
"require": "./client.cjs"
},
"./evaluation": {
"types": "./evaluation.d.ts",
"import": "./evaluation.js",
"require": "./evaluation.cjs"
},
"./package.json": "./package.json"
}
}
6 changes: 0 additions & 6 deletions langchain/src/chains/index.ts
@@ -84,12 +84,6 @@ export {
export { MultiPromptChain } from "./router/multi_prompt.js";
export { MultiRetrievalQAChain } from "./router/multi_retrieval_qa.js";
export { TransformChain, TransformChainFields } from "./transform.js";
export {
OpenAIFunctionsChain,
OpenAIFunctionsChainFields,
parseToArguments,
parseToNamedArgument,
} from "./openai_functions/index.js";
export {
createExtractionChain,
createExtractionChainFromZod,
48 changes: 32 additions & 16 deletions langchain/src/chains/llm_chain.ts
@@ -2,19 +2,27 @@ import { BaseChain, ChainInputs } from "./base.js";
import { BasePromptTemplate } from "../prompts/base.js";
import { BaseLanguageModel } from "../base_language/index.js";
import { ChainValues, Generation, BasePromptValue } from "../schema/index.js";
import { BaseOutputParser } from "../schema/output_parser.js";
import {
BaseLLMOutputParser,
BaseOutputParser,
} from "../schema/output_parser.js";
import { SerializedLLMChain } from "./serde.js";
import { CallbackManager } from "../callbacks/index.js";
import { CallbackManagerForChainRun, Callbacks } from "../callbacks/manager.js";
import { NoOpOutputParser } from "../output_parsers/noop.js";

export interface LLMChainInput<T extends string | object = string>
extends ChainInputs {
export interface LLMChainInput<
T extends string | object = string,
L extends BaseLanguageModel = BaseLanguageModel
> extends ChainInputs {
/** Prompt object to use */
prompt: BasePromptTemplate;
/** LLM Wrapper to use */
llm: BaseLanguageModel;
llm: L;
/** Kwargs to pass to LLM */
llmKwargs?: this["llm"]["CallOptions"];
/** OutputParser to use */
outputParser?: BaseOutputParser<T>;
outputParser?: BaseLLMOutputParser<T>;
/** Key to use for output, defaults to `text` */
outputKey?: string;
}
@@ -32,19 +40,24 @@ export interface LLMChainInput<T extends string | object = string>
* const llm = new LLMChain({ llm: new OpenAI(), prompt });
* ```
*/
export class LLMChain<T extends string | object = string>
export class LLMChain<
T extends string | object = string,
L extends BaseLanguageModel = BaseLanguageModel
>
extends BaseChain
implements LLMChainInput<T>
{
lc_serializable = true;

prompt: BasePromptTemplate;

llm: BaseLanguageModel;
llm: L;

llmKwargs?: this["llm"]["CallOptions"];

outputKey = "text";

outputParser?: BaseOutputParser<T>;
outputParser?: BaseLLMOutputParser<T>;

get inputKeys() {
return this.prompt.inputVariables;
@@ -54,14 +67,16 @@ export class LLMChain<T extends string | object = string>
return [this.outputKey];
}

constructor(fields: LLMChainInput<T>) {
constructor(fields: LLMChainInput<T, L>) {
super(fields);
this.prompt = fields.prompt;
this.llm = fields.llm;
this.llmKwargs = fields.llmKwargs;
this.outputKey = fields.outputKey ?? this.outputKey;
this.outputParser = fields.outputParser ?? this.outputParser;
this.outputParser =
fields.outputParser ?? (new NoOpOutputParser() as BaseOutputParser<T>);
if (this.prompt.outputParser) {
if (this.outputParser) {
if (fields.outputParser) {
throw new Error("Cannot set both outputParser and prompt.outputParser");
}
this.outputParser = this.prompt.outputParser as BaseOutputParser<T>;
@@ -85,16 +100,15 @@ export class LLMChain<T extends string | object = string>
promptValue: BasePromptValue,
runManager?: CallbackManagerForChainRun
): Promise<unknown> {
const completion = generations[0].text;
let finalCompletion: unknown;
if (this.outputParser) {
finalCompletion = await this.outputParser.parseWithPrompt(
completion,
finalCompletion = await this.outputParser.parseResultWithPrompt(
generations,
promptValue,
runManager?.getChild()
);
} else {
finalCompletion = completion;
finalCompletion = generations[0].text;
}
return finalCompletion;
}
@@ -117,7 +131,9 @@ export class LLMChain<T extends string | object = string>
runManager?: CallbackManagerForChainRun
): Promise<ChainValues> {
const valuesForPrompt = { ...values };
const valuesForLLM: this["llm"]["CallOptions"] = {};
const valuesForLLM: this["llm"]["CallOptions"] = {
...this.llmKwargs,
};
for (const key of this.llm.callKeys) {
if (key in values) {
valuesForLLM[key as keyof this["llm"]["CallOptions"]] = values[key];
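The new `llmKwargs` and `outputParser` fields are what the openai_functions helpers below build on. A minimal sketch of using them directly — the function definition, prompt text, and the `langchain/output_parsers` import path are illustrative assumptions, not part of this commit:

```typescript
import { ChatOpenAI } from "langchain/chat_models/openai";
import { PromptTemplate } from "langchain/prompts";
import { LLMChain } from "langchain/chains";
// Assumed public export path for the parser used by extraction.ts below;
// it may only be exposed from a deeper entrypoint.
import { JsonKeyOutputFunctionsParser } from "langchain/output_parsers";

// Illustrative OpenAI function definition: the model is asked to call it
// with an `info` array, which the parser then pulls out of the arguments.
const extractionFunction = {
  name: "information_extraction",
  description: "Extracts the relevant information from the passage.",
  parameters: {
    type: "object",
    properties: {
      info: {
        type: "array",
        items: {
          type: "object",
          properties: { name: { type: "string" } },
        },
      },
    },
    required: ["info"],
  },
};

const chain = new LLMChain({
  llm: new ChatOpenAI({ modelName: "gpt-3.5-turbo-0613", temperature: 0 }),
  prompt: PromptTemplate.fromTemplate(
    "Extract the relevant entities mentioned in the following passage:\n\n{input}"
  ),
  // Passed through to the model call, so the request includes `functions`.
  llmKwargs: { functions: [extractionFunction] },
  // Parses the function_call arguments and returns the value under "info".
  outputParser: new JsonKeyOutputFunctionsParser({ attrName: "info" }),
});

console.log(
  await chain.run("Alex is blonde and his dog Frosty is a labrador.")
);
```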
21 changes: 10 additions & 11 deletions langchain/src/chains/openai_functions/extraction.ts
@@ -4,13 +4,11 @@ import { JsonSchema7ObjectType } from "zod-to-json-schema/src/parsers/object.js"

import { ChatOpenAI } from "../../chat_models/openai.js";
import { PromptTemplate } from "../../prompts/prompt.js";
import { TransformChain } from "../transform.js";
import { SimpleSequentialChain } from "../sequential_chain.js";
import {
FunctionParameters,
OpenAIFunctionsChain,
parseToNamedArgument,
} from "./index.js";
JsonKeyOutputFunctionsParser,
} from "../../output_parsers/openai_functions.js";
import { LLMChain } from "../llm_chain.js";

function getExtractionFunctions(schema: FunctionParameters) {
return [
@@ -47,13 +45,14 @@ export function createExtractionChain(
) {
const functions = getExtractionFunctions(schema);
const prompt = PromptTemplate.fromTemplate(_EXTRACTION_TEMPLATE);
const chain = new OpenAIFunctionsChain({ llm, prompt, functions });
const parsing_chain = new TransformChain({
transform: parseToNamedArgument.bind(null, "info"),
inputVariables: ["input"],
outputVariables: ["output"],
const outputParser = new JsonKeyOutputFunctionsParser({ attrName: "info" });
return new LLMChain({
llm,
prompt,
llmKwargs: { functions },
outputParser,
tags: ["openai_functions", "extraction"],
});
return new SimpleSequentialChain({ chains: [chain, parsing_chain] });
}

export function createExtractionChainFromZod(
Expand Down