From 3398b61125b9a78cb269970bc35c5d5d7513c01d Mon Sep 17 00:00:00 2001
From: ikaros <327209194@qq.com>
Date: Fri, 18 Aug 2023 00:23:54 +0800
Subject: [PATCH] Defer part of the GPT_MODEL configuration to avoid loading
 everything at once; fix the show/hide bug in the LLM section of the GUI
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                  |  4 ++++
 main.py                    |  2 +-
 utils/gpt_model/claude2.py |  3 ++-
 utils/my_handle.py         | 19 ++++++++++++-------
 4 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index a3310df0..07c82d7d 100644
--- a/README.md
+++ b/README.md
@@ -1435,6 +1435,10 @@ cmd运行`npm i docsify-cli -g`
 - 美化logo
 - 简单的初步美化下GUI
+### 2023-08-18
+- 后置部分GPT_MODEL配置，规避一次性全加载问题
+- 修复 LLM部分GUI板块显隐bug
+
diff --git a/main.py b/main.py
index b9e4506a..087e3e49 100644
--- a/main.py
+++ b/main.py
@@ -2729,7 +2729,7 @@ def oncomboBox_chat_type_IndexChanged(self, index):
             7: (0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0),
             8: (0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0),
             9: (0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0),
-            9: (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
+            10: (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
         }
 
         visibility_values = visibility_map.get(index, (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
diff --git a/utils/gpt_model/claude2.py b/utils/gpt_model/claude2.py
index b1b416f8..b8558a69 100644
--- a/utils/gpt_model/claude2.py
+++ b/utils/gpt_model/claude2.py
@@ -160,6 +160,7 @@ def send_message(self, prompt, conversation_id, attachment=None):
         completions = []
         for data_string in data_strings:
             json_str = data_string[6:].strip()
+            logging.debug(f"json_str={json_str}")
             data = json.loads(json_str)
             if 'completion' in data:
                 completions.append(data['completion'])
@@ -344,7 +345,7 @@ def get_claude2_resp(self, prompt):
             resp_content = self.send_message(prompt, self.conversation_id)
             return resp_content
         except Exception as e:
-            logging.info(e)
+            logging.error(traceback.format_exc())
             return None
diff --git a/utils/my_handle.py b/utils/my_handle.py
index 9f0278ad..68476995 100644
--- a/utils/my_handle.py
+++ b/utils/my_handle.py
@@ -112,12 +112,7 @@ def __init__(self, config_path):
         # 设置GPT_Model全局模型列表
         GPT_MODEL.set_model_config("openai", self.openai_config)
         GPT_MODEL.set_model_config("chatgpt", self.chatgpt_config)
-        GPT_MODEL.set_model_config("claude", self.claude_config)
-        GPT_MODEL.set_model_config("claude2", self.claude2_config)
-        GPT_MODEL.set_model_config("chatglm", self.chatglm_config)
-        GPT_MODEL.set_model_config("text_generation_webui", self.text_generation_webui_config)
-        GPT_MODEL.set_model_config("sparkdesk", self.sparkdesk_config)
-        GPT_MODEL.set_model_config("langchain_chatglm", self.langchain_chatglm_config)
+        GPT_MODEL.set_model_config("claude", self.claude_config)
 
         self.chatgpt = None
         self.claude = None
@@ -140,6 +135,8 @@ def __init__(self, config_path):
             if not self.claude.reset_claude():
                 logging.error("重置Claude会话失败喵~")
         elif self.chat_type == "claude2":
+            GPT_MODEL.set_model_config("claude2", self.claude2_config)
+
             self.claude2 = GPT_MODEL.get(self.chat_type)
 
             # 初次运行 先重置下会话
@@ -156,15 +153,23 @@ def __init__(self, config_path):
                 logging.info(e)
                 exit(0)
         elif self.chat_type == "chatglm":
+            GPT_MODEL.set_model_config("chatglm", self.chatglm_config)
+
             self.chatglm = GPT_MODEL.get(self.chat_type)
         elif self.chat_type == "chat_with_file":
             from utils.chat_with_file.chat_with_file import Chat_with_file
             self.chat_with_file = Chat_with_file(self.chat_with_file_config)
         elif self.chat_type == "text_generation_webui":
+            GPT_MODEL.set_model_config("text_generation_webui", self.text_generation_webui_config)
+
             self.text_generation_webui = GPT_MODEL.get(self.chat_type)
         elif self.chat_type == "sparkdesk":
+            GPT_MODEL.set_model_config("sparkdesk", self.sparkdesk_config)
+
             self.sparkdesk = GPT_MODEL.get(self.chat_type)
         elif self.chat_type == "langchain_chatglm":
+            GPT_MODEL.set_model_config("langchain_chatglm", self.langchain_chatglm_config)
+
             self.langchain_chatglm = GPT_MODEL.get(self.chat_type)
         elif self.chat_type == "game":
             # from game.game import Game
@@ -843,7 +848,7 @@ def comment_handle(self, data):
                 logging.info(f"[AI回复{user_name}]:{resp_content}")
             else:
                 resp_content = ""
-                logging.warning("警告:claude无返回")
+                logging.warning("警告:claude2无返回")
         elif self.chat_type == "chatterbot":
             # 生成回复
             resp_content = self.bot.get_response(content).text
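
For context on the first change: utils/my_handle.py now registers only the GPT_MODEL config that the selected chat_type actually needs, instead of registering every backend up front. Below is a minimal, self-contained sketch of that deferred-registration pattern; only the set_model_config()/get() calls and the backend names mirror the diff, while ModelRegistry, DummyBackend and the literal config dicts are illustrative stand-ins, not the project's real classes or settings.

```python
# Sketch of deferring backend configuration until the chat_type is known.
class DummyBackend:
    def __init__(self, name, config):
        self.name = name
        self.config = config


class ModelRegistry:
    def __init__(self):
        self._configs = {}

    def set_model_config(self, name, config):
        # Register a backend's config; nothing heavy is created at this point.
        self._configs[name] = config

    def get(self, name):
        # Only a backend that was actually configured gets instantiated.
        if name not in self._configs:
            raise KeyError(f"model '{name}' was never configured")
        return DummyBackend(name, self._configs[name])


GPT_MODEL = ModelRegistry()

# Backends that are always needed are registered up front...
GPT_MODEL.set_model_config("openai", {"api_key": "sk-..."})
GPT_MODEL.set_model_config("chatgpt", {"model": "gpt-3.5-turbo"})

# ...and the rest are registered lazily, only for the selected chat_type,
# so unused backend configs are never loaded at startup.
chat_type = "claude2"
if chat_type == "claude2":
    GPT_MODEL.set_model_config("claude2", {"cookie": "..."})
    claude2 = GPT_MODEL.get(chat_type)
    print(claude2.name, claude2.config)
```

The main.py hunk replaces a duplicated `9:` key with `10:`. A Python dict literal keeps only the last value bound to a repeated key, so before the fix index 9 silently received the visibility tuple intended for index 10 and index 10 fell through to the all-zeros default, which matches the show/hide bug described in the subject:

```python
# Duplicate keys in a dict literal are legal, but only the last value survives.
visibility_map = {9: (0, 1), 9: (1, 0)}
print(visibility_map)  # prints {9: (1, 0)}; the first 9: entry is silently dropped
```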