
backend: auto-loading the models
iafarhan committed Jan 8, 2025
1 parent a69481a commit bc97a26
Showing 11 changed files with 832 additions and 307 deletions.
File renamed without changes.
475 changes: 475 additions & 0 deletions demo/installer.py

Large diffs are not rendered by default.

37 changes: 22 additions & 15 deletions demo/modules/chat.py
@@ -587,21 +587,25 @@ def find_all_histories_with_first_prompts(state):
     result = []
     for i, path in enumerate(histories):
         filename = path.stem
-        if re.match(r'^[0-9]{8}-[0-9]{2}-[0-9]{2}-[0-9]{2}$', filename):
-            with open(path, 'r', encoding='utf-8') as f:
-                data = json.load(f)
-
-            first_prompt = ""
-            if data and 'visible' in data and len(data['visible']) > 0:
-                if data['internal'][0][0] == '<|BEGIN-VISIBLE-CHAT|>':
-                    if len(data['visible']) > 1:
-                        first_prompt = html.unescape(data['visible'][1][0])
-                    elif i == 0:
-                        first_prompt = "New chat"
-                else:
-                    first_prompt = html.unescape(data['visible'][0][0])
-            elif i == 0:
-                first_prompt = "New chat"
+        try:
+            if re.match(r'^[0-9]{8}-[0-9]{2}-[0-9]{2}-[0-9]{2}$', filename):
+                with open(path, 'r', encoding='utf-8') as f:
+
+                    data = json.load(f)
+
+                first_prompt = ""
+                if data and 'visible' in data and len(data['visible']) > 0:
+                    if data['internal'][0][0] == '<|BEGIN-VISIBLE-CHAT|>':
+                        if len(data['visible']) > 1:
+                            first_prompt = html.unescape(data['visible'][1][0])
+                        elif i == 0:
+                            first_prompt = "New chat"
+                    else:
+                        first_prompt = html.unescape(data['visible'][0][0])
+                elif i == 0:
+                    first_prompt = "New chat"
+        except:
+            pass
         else:
             first_prompt = filename
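
For context, the parser in the hunk above expects each saved history to be a JSON file named like 20250108-14-30-22.json (matching the YYYYMMDD-HH-MM-SS regex) with parallel 'internal' and 'visible' turn lists. A minimal sketch of that structure and of the first-prompt lookup; the file contents here are invented for illustration:

    import html

    # Invented example of a history file's contents, shaped the way the loop above reads it:
    # parallel lists of [user_text, bot_text] pairs, with a sentinel marking the greeting turn.
    data = {
        "internal": [["<|BEGIN-VISIBLE-CHAT|>", "How can I help you?"],
                     ["What is flash-attention?", "A fused attention kernel."]],
        "visible": [["<|BEGIN-VISIBLE-CHAT|>", "How can I help you?"],
                    ["What is flash-attention?", "A fused attention kernel."]],
    }

    # Same lookup as above: skip the greeting turn and take the first real user prompt.
    first_prompt = ""
    if data and 'visible' in data and len(data['visible']) > 0:
        if data['internal'][0][0] == '<|BEGIN-VISIBLE-CHAT|>':
            if len(data['visible']) > 1:
                first_prompt = html.unescape(data['visible'][1][0])
        else:
            first_prompt = html.unescape(data['visible'][0][0])

    print(first_prompt)  # -> What is flash-attention?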

@@ -1045,6 +1049,7 @@ def handle_remove_last_click(state):


 def handle_unique_id_select(state):
+    state['character_menu'] = state['character_menu'] or 'Assistant'
     history = load_history(state['unique_id'], state['character_menu'], state['mode'])
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])

@@ -1126,6 +1131,8 @@ def handle_upload_chat_history(load_chat_history, state):


 def handle_character_menu_change(state):
+
+    state['character_menu'] =state['character_menu'] or 'Assistant'
     name1, name2, picture, greeting, context = load_character(state['character_menu'], state['name1'], state['name2'])

     state['name1'] = name1
3 changes: 2 additions & 1 deletion demo/modules/exllamav2_hf.py
@@ -23,7 +23,8 @@

 try:
     import flash_attn
-except Exception:
+except Exception as e:
+    print('thiss',e)
     logger.warning('Failed to load flash-attention due to the following error:\n')
     traceback.print_exc()

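The hunk above only adds a debug print of the captured exception. For reference, a sketch of the same optional-dependency guard with the error message folded into the existing warning instead of a separate print (an illustration, not what the commit ships):

    import traceback

    from modules.logging_colors import logger  # the same logger this module already uses

    try:
        import flash_attn
    except Exception as e:
        # flash-attention is optional; log why it failed and continue without it.
        logger.warning(f'Failed to load flash-attention due to the following error: {e}\n')
        traceback.print_exc()
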
2 changes: 2 additions & 0 deletions demo/modules/extensions.py
@@ -8,6 +8,8 @@
 import modules.shared as shared
 from modules.logging_colors import logger

+
+
 state = {}
 available_extensions = []
 setup_called = set()
16 changes: 13 additions & 3 deletions demo/modules/ui.py
@@ -5,9 +5,9 @@
 import torch
 import yaml
 from transformers import is_torch_xpu_available
-
+import json
 from modules import shared
-
+import modules.extensions as extensions
 with open(Path(__file__).resolve().parent / '../css/NotoSans/stylesheet.css', 'r') as f:
     css = f.read()
 with open(Path(__file__).resolve().parent / '../css/main.css', 'r') as f:
@@ -217,8 +217,18 @@ def gather_interface_values(*args):

     output = {}
     for element, value in zip(interface_elements, args):
+        if element == 'history':
+            if "root" in value:
+                value = value.split("root=")[1]
+                value = value.replace("'",'"')
+                value = json.loads(value)
+
+
+        # output[element] = {'internal': [], 'visible': []}
+        # print(output[element])
+        # else:
         output[element] = value

     if not shared.args.multi_user:
         shared.persistent_interface_state = output

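The new 'history' branch above assumes the component value can arrive as a string repr prefixed with "root=" rather than as a dict. A minimal sketch of that conversion on an invented input (the single-to-double quote swap only holds while the history text itself contains no quote characters):

    import json

    # Invented example of the kind of value the branch above handles.
    raw = "root={'internal': [], 'visible': []}"

    if "root" in raw:
        payload = raw.split("root=")[1]       # keep everything after "root="
        payload = payload.replace("'", '"')   # naive repr-to-JSON quote fix-up, as in the diff
        history = json.loads(payload)
        print(history)  # -> {'internal': [], 'visible': []}
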
64 changes: 32 additions & 32 deletions demo/modules/ui_chat.py
@@ -277,37 +277,37 @@ def create_event_handlers():
     shared.gradio['Copy last reply'].click(chat.send_last_reply_to_input, gradio('history'), gradio('textbox'), show_progress=False)

     # Save/delete a character
-    shared.gradio['save_character'].click(chat.handle_save_character_click, gradio('name2'), gradio('save_character_filename', 'character_saver'), show_progress=False)
-    shared.gradio['delete_character'].click(lambda: gr.update(visible=True), None, gradio('character_deleter'), show_progress=False)
-    shared.gradio['load_template'].click(chat.handle_load_template_click, gradio('instruction_template'), gradio('instruction_template_str', 'instruction_template'), show_progress=False)
-    shared.gradio['save_template'].click(
-        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.handle_save_template_click, gradio('instruction_template_str'), gradio('save_filename', 'save_root', 'save_contents', 'file_saver'), show_progress=False)

-    shared.gradio['delete_template'].click(chat.handle_delete_template_click, gradio('instruction_template'), gradio('delete_filename', 'delete_root', 'file_deleter'), show_progress=False)
-    shared.gradio['save_chat_history'].click(
-        lambda x: json.dumps(x, indent=4), gradio('history'), gradio('temporary_text')).then(
-        None, gradio('temporary_text', 'character_menu', 'mode'), None, js=f'(hist, char, mode) => {{{ui.save_files_js}; saveHistory(hist, char, mode)}}')

-    shared.gradio['Submit character'].click(
-        chat.upload_character, gradio('upload_json', 'upload_img_bot'), gradio('character_menu'), show_progress=False).then(
-        None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

-    shared.gradio['Submit tavern character'].click(
-        chat.upload_tavern_character, gradio('upload_img_tavern', 'tavern_json'), gradio('character_menu'), show_progress=False).then(
-        None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

-    shared.gradio['upload_json'].upload(lambda: gr.update(interactive=True), None, gradio('Submit character'))
-    shared.gradio['upload_json'].clear(lambda: gr.update(interactive=False), None, gradio('Submit character'))
-    shared.gradio['upload_img_tavern'].upload(chat.check_tavern_character, gradio('upload_img_tavern'), gradio('tavern_name', 'tavern_desc', 'tavern_json', 'Submit tavern character'), show_progress=False)
-    shared.gradio['upload_img_tavern'].clear(lambda: (None, None, None, gr.update(interactive=False)), None, gradio('tavern_name', 'tavern_desc', 'tavern_json', 'Submit tavern character'), show_progress=False)
-    shared.gradio['your_picture'].change(
-        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.handle_your_picture_change, gradio('your_picture', 'interface_state'), gradio('display'), show_progress=False)
+    # shared.gradio['save_character'].click(chat.handle_save_character_click, gradio('name2'), gradio('save_character_filename', 'character_saver'), show_progress=False)
+    # shared.gradio['delete_character'].click(lambda: gr.update(visible=True), None, gradio('character_deleter'), show_progress=False)
+    # shared.gradio['load_template'].click(chat.handle_load_template_click, gradio('instruction_template'), gradio('instruction_template_str', 'instruction_template'), show_progress=False)
+    # shared.gradio['save_template'].click(
+    #     ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+    #     chat.handle_save_template_click, gradio('instruction_template_str'), gradio('save_filename', 'save_root', 'save_contents', 'file_saver'), show_progress=False)

+    # shared.gradio['delete_template'].click(chat.handle_delete_template_click, gradio('instruction_template'), gradio('delete_filename', 'delete_root', 'file_deleter'), show_progress=False)
+    # shared.gradio['save_chat_history'].click(
+    #     lambda x: json.dumps(x, indent=4), gradio('history'), gradio('temporary_text')).then(
+    #     None, gradio('temporary_text', 'character_menu', 'mode'), None, js=f'(hist, char, mode) => {{{ui.save_files_js}; saveHistory(hist, char, mode)}}')

+    # shared.gradio['Submit character'].click(
+    #     chat.upload_character, gradio('upload_json', 'upload_img_bot'), gradio('character_menu'), show_progress=False).then(
+    #     None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

+    # shared.gradio['Submit tavern character'].click(
+    #     chat.upload_tavern_character, gradio('upload_img_tavern', 'tavern_json'), gradio('character_menu'), show_progress=False).then(
+    #     None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

+    # shared.gradio['upload_json'].upload(lambda: gr.update(interactive=True), None, gradio('Submit character'))
+    # shared.gradio['upload_json'].clear(lambda: gr.update(interactive=False), None, gradio('Submit character'))
+    # shared.gradio['upload_img_tavern'].upload(chat.check_tavern_character, gradio('upload_img_tavern'), gradio('tavern_name', 'tavern_desc', 'tavern_json', 'Submit tavern character'), show_progress=False)
+    # shared.gradio['upload_img_tavern'].clear(lambda: (None, None, None, gr.update(interactive=False)), None, gradio('tavern_name', 'tavern_desc', 'tavern_json', 'Submit tavern character'), show_progress=False)
+    # shared.gradio['your_picture'].change(
+    #     ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+    #     chat.handle_your_picture_change, gradio('your_picture', 'interface_state'), gradio('display'), show_progress=False)

-    shared.gradio['send_instruction_to_negative_prompt'].click(
-        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.handle_send_instruction_click, gradio('interface_state'), gradio('negative_prompt'), show_progress=False).then(
-        None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_generation_parameters()}}')
+    # shared.gradio['send_instruction_to_negative_prompt'].click(
+    #     ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+    #     chat.handle_send_instruction_click, gradio('interface_state'), gradio('negative_prompt'), show_progress=False).then(
+    #     None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_generation_parameters()}}')

-    shared.gradio['show_controls'].change(None, gradio('show_controls'), None, js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
+    # shared.gradio['show_controls'].change(None, gradio('show_controls'), None, js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
1 change: 0 additions & 1 deletion demo/modules/ui_model_menu.py
@@ -167,7 +167,6 @@ def load_model_wrapper(selected_model, loader, autoload=False):
             if selected_model != '':

                 shared.model_name = selected_model
-                print('here',selected_model, shared.model_name)
                 ModelClass = ModelRegistry.get_model(shared.model_name, None)
                 shared.handler = ModelClass()
                 shared.tokenizer, shared.model = shared.handler.load_model_tokenizer(model_name=shared.model_name)
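ModelRegistry and the handler classes are defined elsewhere (presumably in the new demo/installer.py, whose 475-line diff is not rendered above), so the sketch below is an assumption about the shape of that API, inferred only from the call site: a registry that maps a model name to a handler class whose load_model_tokenizer() returns (tokenizer, model).

    class ModelRegistry:
        """Hypothetical registry keyed by substrings of the model name."""
        _handlers = {}

        @classmethod
        def register(cls, key):
            def decorator(handler_cls):
                cls._handlers[key] = handler_cls
                return handler_cls
            return decorator

        @classmethod
        def get_model(cls, model_name, default=None):
            # Mirrors the ModelRegistry.get_model(shared.model_name, None) call site above.
            for key, handler_cls in cls._handlers.items():
                if key.lower() in model_name.lower():
                    return handler_cls
            return default


    @ModelRegistry.register('exl2')
    class ExllamaV2Handler:
        def load_model_tokenizer(self, model_name):
            # Placeholder body; a real handler would build and return (tokenizer, model).
            return None, None
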
1 change: 1 addition & 0 deletions demo/one_click.py
@@ -402,6 +402,7 @@ def update_requirements(initial_installation=False, pull=True):


 def launch_webui():
+    print("arguments", flags)
     run_cmd(f"python server.py {flags}", environment=True)

