Skip to content

Commit

Permalink
add chat with streamlit ui
Browse files Browse the repository at this point in the history
  • Loading branch information
little51 committed Nov 11, 2024
1 parent 3b15827 commit ceb1a30
Show file tree
Hide file tree
Showing 3 changed files with 151 additions and 0 deletions.
24 changes: 24 additions & 0 deletions aicode/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# AI-Code-Test

## 1、环境

```shell
# 建立虚拟环境
conda create -n ai-code python=3.12 -y
# 激活虚拟环境
conda activate ai-code
# 安装依赖库
pip install -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple/
```

## 2、运行

```shell
# 激活虚拟环境
conda activate ai-code
# 运行Chat程序
streamlit run chat_bot.py
# 后台运行程序
nohup streamlit run chat_bot.py >aicode.log 2>&1 &
```

124 changes: 124 additions & 0 deletions aicode/chat_bot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
from openai import OpenAI
import streamlit as st
import streamlit.components.v1 as components
import random

# OpenAI-compatible client pointed at the gitclone.com Ollama proxy.
# api_key is a placeholder — presumably the endpoint ignores it; verify
# against the service if authentication is ever enabled.
client = OpenAI(
    base_url="https://gitclone.com/aiit/ollama/v1",
    api_key="EMPTY")


def clear_chat_history():
    """Reset the conversation to a single empty assistant message and
    move keyboard focus back to the chat input box."""
    st.session_state.messages = [{"role": "assistant", "content": ""}]
    setInputFocus()


def init_page():
    """Configure the Streamlit page: tab title, icon, and wide layout."""
    st.set_page_config(page_title="AI Chatbot", page_icon=" ", layout="wide")


def init_sidebar():
    """Render the sidebar: model picker, sampling sliders, and utilities.

    Every widget persists its value in ``st.session_state`` under its
    ``key`` ('selected_model', 'temperature', 'top_p', 'max_length'),
    so no local bindings are kept (the originals were unused locals).
    """
    st.sidebar.title('AI Chatbot')
    # Hide Streamlit chrome elements.
    # NOTE(review): these emotion-cache class names are generated per
    # Streamlit version — confirm they still match after upgrades.
    st.markdown(
        """
        <style>
        .st-emotion-cache-1huvf7z {display: none;}
        .st-emotion-cache-w3nhqi {display: none;}
        </style>
        """,
        unsafe_allow_html=True
    )
    # Model selection; on_change resets the conversation for the new model.
    model_options = ('glm4', 'llama3.2', 'gemma2',
                     'qwen2.5', 'phi3.5', 'mistral-small',
                     'deepseek-coder-v2')
    st.sidebar.radio(
        label='选择模型',
        options=model_options,
        index=0,
        format_func=str,
        help='',
        key='selected_model', on_change=model_changed)
    # Sampling temperature.
    st.sidebar.slider(
        '温度', min_value=0.01, max_value=1.0, value=0.9, step=0.1, key='temperature')
    # Nucleus sampling probability (top_p).
    st.sidebar.slider('累计概率采样', min_value=0.01,
                      max_value=1.0, value=0.9, step=0.1, key='top_p')
    # Maximum generation length in tokens.
    st.sidebar.slider(
        '最大长度', min_value=64, max_value=4096, value=512, step=8, key='max_length')
    # Clear chat history.
    st.sidebar.button('清除会话记录', on_click=clear_chat_history)
    # Link to the legacy UI.
    st.sidebar.markdown(f'''
    <a href={'https://gitclone.com/aiit/chat/'}>旧版</a>
    ''',
                        unsafe_allow_html=True)


def setInputFocus():
    """Move keyboard focus to the chat input textarea.

    Injects a hidden HTML/JS snippet that, after a short delay, focuses the
    chat input textarea in the parent frame.  A random key value forces the
    component to be treated as new on each call, so the script re-runs.
    """
    unique_id = random.randint(1, 10000)
    html_code = f"""
    <div style="display: none">
        <script key="{unique_id}">
            setTimeout(() => {{
                const input = window.parent.document.querySelectorAll(
                    'textarea[data-testid=stChatInputTextArea]')[0];
                if (input) {{
                    input.focus();
                }}
            }}, 300);
        </script>
    </div>
    """
    with st.sidebar.container():
        # Use the `components` alias imported at the top of the file
        # (it was previously unused; `st.components.v1.html` is the same API).
        components.html(html_code, height=0)


def model_changed():
    """Radio-button callback: when the chosen model differs from the one
    last seen, remember it, restart the conversation with a notice, and
    refocus the chat input."""
    model = st.session_state.get('selected_model', '')
    # .get() returns None when the key is absent, which never equals the
    # (string) model — same effect as the original membership check.
    if st.session_state.get('last_selected_model') != model:
        st.session_state.last_selected_model = model
        st.session_state.messages = [
            {"role": "assistant", "content": "您选择的模型是:" + model}
        ]
        setInputFocus()


def chat_bot():
    """Render the conversation and handle one user turn.

    Replays the stored messages, appends the user's new prompt, then streams
    the assistant reply from the OpenAI-compatible endpoint using the model
    and sampling parameters selected in the sidebar.
    """
    # Replay the conversation kept in session state.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])
    if prompt := st.chat_input(placeholder="请输入您的问题", key="chat_input"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)
        model = st.session_state.get('selected_model', 'glm4')
        # Only generate when the latest message is from the user, so a
        # rerun cannot produce a duplicate assistant reply.
        if st.session_state.messages[-1]["role"] != "assistant":
            with st.chat_message("assistant"):
                stream = client.chat.completions.create(
                    model=model,
                    messages=[
                        {"role": m["role"], "content": m["content"]}
                        for m in st.session_state.messages
                    ],
                    # Fix: forward the sidebar slider values — they were
                    # previously collected but never sent to the model.
                    # Defaults mirror the sliders' initial values.
                    temperature=st.session_state.get('temperature', 0.9),
                    top_p=st.session_state.get('top_p', 0.9),
                    max_tokens=st.session_state.get('max_length', 512),
                    stream=True,
                )
                response = st.write_stream(stream)
                st.session_state.messages.append(
                    {"role": "assistant", "content": response}
                )


# Entry point: configure the page and sidebar, seed the conversation for
# the initially selected model, then run the chat interaction loop.
if __name__ == '__main__':
    init_page()
    init_sidebar()
    model_changed()
    chat_bot()
3 changes: 3 additions & 0 deletions aicode/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
openai==1.52.2
streamlit==1.39.0
streamlit_chat==0.1.1

0 comments on commit ceb1a30

Please sign in to comment.