# """
# 对各个llm模型进行单元测试
# """
def validate_path():
    # Assume the repository root is one level above this file; make it the
    # working directory and add it to the import path so the request_llm
    # package resolves no matter where the script is launched from.
    import os, sys
    root_dir_assume = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    os.chdir(root_dir_assume)
    sys.path.append(root_dir_assume)


validate_path()  # fix up paths so the test can run from the base directory
if __name__ == "__main__":
    # Uncomment exactly one bridge import to select the model under test.
    # from request_llm.bridge_newbingfree import predict_no_ui_long_connection
    # from request_llm.bridge_moss import predict_no_ui_long_connection
    # from request_llm.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
    # from request_llm.bridge_jittorllms_llama import predict_no_ui_long_connection
    # from request_llm.bridge_claude import predict_no_ui_long_connection
    # from request_llm.bridge_internlm import predict_no_ui_long_connection
    # from request_llm.bridge_qwen import predict_no_ui_long_connection
    from request_llm.bridge_spark import predict_no_ui_long_connection
    llm_kwargs = {
        'max_length': 4096,
        'top_p': 1,
        'temperature': 1,
    }

    result = predict_no_ui_long_connection(
        inputs="请问什么是质子?",       # "What is a proton?"
        llm_kwargs=llm_kwargs,
        history=["你好", "我好!"],      # ["Hello", "I'm fine!"]
        sys_prompt="",
    )
    print('final result:', result)
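

# A possible extension (not part of the original test): exercise several
# bridges with the same prompt in one run, instead of editing the import each
# time. This is only a sketch; it assumes each module named below exists under
# request_llm and exports predict_no_ui_long_connection with the same
# signature used above. Call test_all_bridges(llm_kwargs) from the __main__
# block to use it.
def test_all_bridges(llm_kwargs, bridge_names=('bridge_spark', 'bridge_qwen')):
    import importlib
    for name in bridge_names:
        bridge = importlib.import_module(f'request_llm.{name}')
        result = bridge.predict_no_ui_long_connection(
            inputs="请问什么是质子?",  # "What is a proton?"
            llm_kwargs=llm_kwargs,
            history=[],
            sys_prompt="")
        print(f'{name}:', result)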