Merge branch 'main' into pe/assistant-select
sestinj authored Feb 1, 2025
2 parents 55d7647 + 9ae37ac commit 8234137
Showing 3 changed files with 121 additions and 92 deletions.
core/llm/llm.test.ts (116 additions, 87 deletions)
@@ -26,7 +26,13 @@ function testLLM(
     skip,
     testFim,
     testToolCall,
-  }: { skip?: boolean; testFim?: boolean; testToolCall?: boolean },
+    timeout,
+  }: {
+    skip?: boolean;
+    testFim?: boolean;
+    testToolCall?: boolean;
+    timeout?: number;
+  },
 ) {
   if (skip) {
     return;
@@ -37,113 +43,136 @@
   });
 
   describe(llm.providerName + "/" + llm.model, () => {
-    test("Stream Chat works", async () => {
-      let total = "";
-      for await (const chunk of llm.streamChat(
-        [{ role: "user", content: "Hi" }],
-        new AbortController().signal,
-      )) {
-        total += chunk.content;
-      }
-
-      expect(total.length).toBeGreaterThan(0);
-      return;
-    });
+    test(
+      "Stream Chat works",
+      async () => {
+        let total = "";
+        for await (const chunk of llm.streamChat(
+          [{ role: "user", content: "Hi" }],
+          new AbortController().signal,
+        )) {
+          total += chunk.content;
+        }
+
+        expect(total.length).toBeGreaterThan(0);
+        return;
+      },
+      timeout,
+    );
 
-    test("Stream Complete works", async () => {
-      let total = "";
-      for await (const chunk of llm.streamComplete(
-        "Hi",
-        new AbortController().signal,
-      )) {
-        total += chunk;
-      }
-
-      expect(total.length).toBeGreaterThan(0);
-      return;
-    });
+    test(
+      "Stream Complete works",
+      async () => {
+        let total = "";
+        for await (const chunk of llm.streamComplete(
+          "Hi",
+          new AbortController().signal,
+        )) {
+          total += chunk;
+        }
+
+        expect(total.length).toBeGreaterThan(0);
+        return;
+      },
+      timeout,
+    );
 
-    test("Complete works", async () => {
-      const completion = await llm.complete("Hi", new AbortController().signal);
-
-      expect(completion.length).toBeGreaterThan(0);
-      return;
-    });
+    test(
+      "Complete works",
+      async () => {
+        const completion = await llm.complete(
+          "Hi",
+          new AbortController().signal,
+        );
+
+        expect(completion.length).toBeGreaterThan(0);
+        return;
+      },
+      timeout,
+    );
 
     if (testFim) {
-      test("FIM works", async () => {
-        let total = "";
-        for await (const chunk of llm.streamFim(
-          "Hi",
-          "name is ChatGPT.",
-          new AbortController().signal,
-        )) {
-          total += chunk;
-        }
-
-        expect(total.length).toBeGreaterThan(0);
-        return;
-      });
+      test(
+        "FIM works",
+        async () => {
+          let total = "";
+          for await (const chunk of llm.streamFim(
+            "Hi",
+            "name is ChatGPT.",
+            new AbortController().signal,
+          )) {
+            total += chunk;
+          }
+
+          expect(total.length).toBeGreaterThan(0);
+          return;
+        },
+        timeout,
+      );
     }
 
     if (testToolCall) {
-      test("Tool Call works", async () => {
-        let args = "";
-        let isFirstChunk = true;
-        for await (const chunk of llm.streamChat(
-          [{ role: "user", content: "Hi, my name is Nate." }],
-          new AbortController().signal,
-          {
-            tools: [
-              {
-                displayTitle: "Say Hello",
-                function: {
-                  name: "say_hello",
-                  description: "Say Hello",
-                  parameters: {
-                    type: "object",
-                    properties: {
-                      name: {
-                        type: "string",
-                        description: "The name of the person to greet",
-                      },
-                    },
-                  },
-                },
-                type: "function",
-                wouldLikeTo: "Say hello",
-                readonly: true,
-              },
-            ],
-            toolChoice: {
-              type: "function",
-              function: {
-                name: "say_hello",
-              },
-            },
-          },
-        )) {
-          const typedChunk = chunk as AssistantChatMessage;
-          if (!typedChunk.toolCalls) {
-            continue;
-          }
-          const toolCall = typedChunk.toolCalls[0];
-          args += toolCall.function?.arguments ?? "";
-
-          expect(chunk.role).toBe("assistant");
-          expect(chunk.content).toBe("");
-          expect(typedChunk.toolCalls).toHaveLength(1);
-
-          if (isFirstChunk) {
-            isFirstChunk = false;
-            expect(toolCall.id).toBeDefined();
-            expect(toolCall.function!.name).toBe("say_hello");
-          }
-        }
-
-        const parsedArgs = JSON.parse(args);
-        expect(parsedArgs.name).toBe("Nate");
-      });
+      test(
+        "Tool Call works",
+        async () => {
+          let args = "";
+          let isFirstChunk = true;
+          for await (const chunk of llm.streamChat(
+            [{ role: "user", content: "Hi, my name is Nate." }],
+            new AbortController().signal,
+            {
+              tools: [
+                {
+                  displayTitle: "Say Hello",
+                  function: {
+                    name: "say_hello",
+                    description: "Say Hello",
+                    parameters: {
+                      type: "object",
+                      properties: {
+                        name: {
+                          type: "string",
+                          description: "The name of the person to greet",
+                        },
+                      },
+                    },
+                  },
+                  type: "function",
+                  wouldLikeTo: "Say hello",
+                  readonly: true,
+                },
+              ],
+              toolChoice: {
+                type: "function",
+                function: {
+                  name: "say_hello",
+                },
+              },
+            },
+          )) {
+            const typedChunk = chunk as AssistantChatMessage;
+            if (!typedChunk.toolCalls) {
+              continue;
+            }
+            const toolCall = typedChunk.toolCalls[0];
+            args += toolCall.function?.arguments ?? "";
+
+            expect(chunk.role).toBe("assistant");
+            expect(chunk.content).toBe("");
+            expect(typedChunk.toolCalls).toHaveLength(1);
+
+            if (isFirstChunk) {
+              isFirstChunk = false;
+              expect(toolCall.id).toBeDefined();
+              expect(toolCall.function!.name).toBe("say_hello");
+            }
+          }
+
+          const parsedArgs = JSON.parse(args);
+          expect(parsedArgs.name).toBe("Nate");
+        },
+        timeout,
+      );
     }
   });
 }
@@ -165,7 +194,7 @@ describe("LLM", () => {
   });
   testLLM(
     new OpenAI({ apiKey: process.env.OPENAI_API_KEY, model: "o1-preview" }),
-    { skip: false },
+    { skip: false, timeout: 20000 },
   );
   testLLM(new OpenAI({ apiKey: process.env.OPENAI_API_KEY, model: "o1" }), {
     skip: false,
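The restructuring above switches each call from `test(name, fn)` to `test(name, fn, timeout)` so that a per-suite timeout in milliseconds can be threaded through `testLLM`; Jest accepts a third argument for this, and an `undefined` value falls back to the configured default. A minimal sketch of the same pattern, using illustrative names that are not part of this repository:

```typescript
import { expect, test } from "@jest/globals";

// Illustrative helper mirroring the testLLM pattern: an optional timeout is
// forwarded as Jest's third argument, overriding the default only when set.
function testSlowThing(label: string, options: { timeout?: number } = {}) {
  test(
    `${label} resolves`,
    async () => {
      const result = await new Promise<string>((resolve) =>
        setTimeout(() => resolve("ok"), 50),
      );
      expect(result).toBe("ok");
    },
    options.timeout, // undefined falls back to Jest's configured default
  );
}

testSlowThing("fast provider"); // default timeout
testSlowThing("reasoning model", { timeout: 20000 }); // 20s, as used for o1-preview above
```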
core/tools/definitions/searchWeb.ts (2 additions, 2 deletions)
@@ -13,9 +13,9 @@ export const searchWebTool: Tool = {
       "Performs a web search, returning top results. This tool should only be called for questions that require external knowledge. Common programming questions do not require web search.",
     parameters: {
       type: "object",
-      required: ["repo_url"],
+      required: ["query"],
       properties: {
-        repo_url: {
+        query: {
           type: "string",
           description: "The natural language search query",
         },
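The schema fix above matters because the `required` list and the `properties` map must name the same key; with the old definition, a conforming tool call had no way to supply the field the schema demanded. Below is a small sketch of a consistency check one could run over tool definitions; the `ToolLike` shape is a simplified stand-in, not Continue's actual `Tool` interface:

```typescript
// Simplified stand-in for a function-calling tool definition.
interface ToolLike {
  function: {
    name: string;
    parameters: {
      type: "object";
      required?: string[];
      properties: Record<string, unknown>;
    };
  };
}

// Returns the names of required parameters that are never declared under
// `properties`; this is exactly the mismatch the commit fixes (repo_url vs. query).
function missingRequiredParams(tool: ToolLike): string[] {
  const declared = new Set(Object.keys(tool.function.parameters.properties));
  return (tool.function.parameters.required ?? []).filter(
    (name) => !declared.has(name),
  );
}

const before: ToolLike = {
  function: {
    name: "search_web",
    parameters: {
      type: "object",
      required: ["repo_url"],
      properties: { query: { type: "string" } },
    },
  },
};

console.log(missingRequiredParams(before)); // ["repo_url"]
```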
docs/docs/customize/model-providers/top-level/xAI.md (3 additions, 3 deletions)
@@ -11,15 +11,15 @@ You can get an API key from the [xAI console](https://console.x.ai/)
 
 ## Chat model
 
-We recommend configuring **grok-beta** as your chat model.
+We recommend configuring **grok-2-latest** as your chat model. For information on other available models, visit [xAI Documentation](https://docs.x.ai/docs/models).
 
 ```json title="config.json"
 {
   "models": [
     {
-      "title": "Grok Beta",
+      "title": "Grok 2",
       "provider": "xAI",
-      "model": "grok-beta",
+      "model": "grok-2-latest",
       "apiKey": "[API_KEY]"
     }
   ]
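For anyone updating an existing config, a quick way to confirm that the new model name resolves is to call the provider directly. The sketch below assumes xAI's OpenAI-compatible chat completions endpoint at `https://api.x.ai/v1` and an `XAI_API_KEY` environment variable; neither is part of this commit, so treat the URL and payload shape as assumptions to verify against xAI's docs:

```typescript
// Hypothetical smoke test for the model name referenced in the docs change.
async function checkGrokModel(model = "grok-2-latest"): Promise<void> {
  const response = await fetch("https://api.x.ai/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.XAI_API_KEY}`, // assumed env var
    },
    body: JSON.stringify({
      model,
      messages: [{ role: "user", content: "Hi" }],
    }),
  });

  if (!response.ok) {
    throw new Error(`xAI request failed with status ${response.status}`);
  }
  const data = await response.json();
  console.log(data.choices?.[0]?.message?.content);
}

checkGrokModel().catch(console.error);
```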
