Skip to content

Commit

Permalink
chore: auto concat messages
Browse files Browse the repository at this point in the history
  • Loading branch information
fredliang44 committed Dec 24, 2023
1 parent cad461b commit 199f29e
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 26 deletions.
24 changes: 15 additions & 9 deletions app/client/platforms/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,24 @@ export class GeminiProApi implements LLMApi {
}
async chat(options: ChatOptions): Promise<void> {
const messages = options.messages.map((v) => ({
role: v.role.replace("assistant", "model").replace("system", "model"),
role: v.role.replace("assistant", "model").replace("system", "user"),
parts: [{ text: v.content }],
}));

// Google's Gemini API requires that adjacent messages not share the same role
for (let i = 0; i < messages.length - 1; ) {
// Check whether the current and next messages have the same role
if (messages[i].role === messages[i + 1].role) {
// Concatenate the 'parts' of the current and next item
messages[i].parts = messages[i].parts.concat(messages[i + 1].parts);
// Remove the next item
messages.splice(i + 1, 1);
} else {
// Move to the next item
i++;
}
}

const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
Expand All @@ -43,14 +57,6 @@ export class GeminiProApi implements LLMApi {
topP: modelConfig.top_p,
// "topK": modelConfig.top_k,
},
// stream: options.config.stream,
// model: modelConfig.model,
// temperature: modelConfig.temperature,
// presence_penalty: modelConfig.presence_penalty,
// frequency_penalty: modelConfig.frequency_penalty,
// top_p: modelConfig.top_p,
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
};

console.log("[Request] google payload: ", requestPayload);
Expand Down
32 changes: 15 additions & 17 deletions app/store/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -389,24 +389,22 @@ export const useChatStore = createPersistStore(
const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;

var systemPrompts: ChatMessage[] = [];
if (modelConfig.model !== "gemini-pro") {
systemPrompts = shouldInjectSystemPrompts
? [
createMessage({
role: "system",
content: fillTemplateWith("", {
...modelConfig,
template: DEFAULT_SYSTEM_TEMPLATE,
}),
systemPrompts = shouldInjectSystemPrompts
? [
createMessage({
role: "system",
content: fillTemplateWith("", {
...modelConfig,
template: DEFAULT_SYSTEM_TEMPLATE,
}),
]
: [];
if (shouldInjectSystemPrompts) {
console.log(
"[Global System Prompt] ",
systemPrompts.at(0)?.content ?? "empty",
);
}
}),
]
: [];
if (shouldInjectSystemPrompts) {
console.log(
"[Global System Prompt] ",
systemPrompts.at(0)?.content ?? "empty",
);
}

// long term memory
Expand Down

0 comments on commit 199f29e

Please sign in to comment.