workstation_server.py
import datetime
import json
import os
from pathlib import Path
try:
    import add_qwen_libs  # NOQA
except ImportError:
    pass
from qwen_agent.agents import ArticleAgent, Assistant, ReActChat
from qwen_agent.gui import gr, mgr
from qwen_agent.gui.utils import get_avatar_image
from qwen_agent.llm import get_chat_model
from qwen_agent.llm.base import ModelServiceError
from qwen_agent.memory import Memory
from qwen_agent.tools.simple_doc_parser import PARSER_SUPPORTED_FILE_TYPES
from qwen_agent.utils.utils import get_basename_from_url, get_file_type, has_chinese_chars, save_text_to_file
from qwen_server import output_beautify
from qwen_server.schema import GlobalConfig
from qwen_server.utils import read_meta_data_by_condition, save_browsing_meta_data
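# This module serves the BrowserQwen "workstation" web UI with Gradio: an
# Editor tab for continue-writing (with /title, /code and /plug commands), a
# Chat tab grounded in browsing history or a code interpreter, and a Pure Chat
# tab that talks to the LLM directly.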
# Read config
with open(Path(__file__).resolve().parent / 'server_config.json', 'r') as f:
    server_config = json.load(f)
    server_config = GlobalConfig(**server_config)

llm_config = None
if hasattr(server_config.server, 'llm'):
    llm_config = {
        'model': server_config.server.llm,
        'api_key': server_config.server.api_key,
        'model_server': server_config.server.model_server
    }
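# Illustrative shape of server_config.json, inferred only from the fields this
# module reads (the real file may contain more keys and different values):
# {
#     "server": {"llm": "...", "api_key": "", "model_server": "...",
#                "max_days": 7, "max_ref_token": 4000,
#                "server_host": "127.0.0.1", "workstation_port": 7864},
#     "path": {"work_space_root": "...", "download_root": "..."}
# }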
app_global_para = {
    'time': [str(datetime.date.today()), str(datetime.date.today())],
    'messages': [],
    'last_turn_msg_id': [],
    'is_first_upload': True,
    'uploaded_ci_file': '',
    'pure_messages': [],
    'pure_last_turn_msg_id': [],
}
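# Chat-tab plugin options and the slash commands recognized at the end of the
# Editor input (routed in generate()).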
DOC_OPTION = 'Document QA'
CI_OPTION = 'Code Interpreter'
CODE_FLAG = '/code'
PLUGIN_FLAG = '/plug'
TITLE_FLAG = '/title'
with open(Path(__file__).resolve().parent / 'css/main.css', 'r') as f:
    css = f.read()
with open(Path(__file__).resolve().parent / 'js/main.js', 'r') as f:
    js = f.read()
meta_file = os.path.join(server_config.path.work_space_root, 'meta_data.jsonl')
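# meta_data.jsonl stores one record per browsed or uploaded document (at least
# url, title and a checked flag); read_meta_data_by_condition filters it by the
# selected date range to build the browsing-history list.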
def add_text(history, text):
    history = history + [(text, None)]
    app_global_para['last_turn_msg_id'] = []
    return history, gr.update(value='', interactive=False)


def pure_add_text(history, text):
    history = history + [(text, None)]
    app_global_para['pure_last_turn_msg_id'] = []
    return history, gr.update(value='', interactive=False)


def rm_text(history):
    if not history:
        gr.Warning('No input content!')
    elif not history[-1][1]:
        return history, gr.update(value='', interactive=False)
    else:
        history = history[:-1] + [(history[-1][0].text, None)]
        return history, gr.update(value='', interactive=False)


def chat_clear():
    app_global_para['messages'] = []
    return None, None


def chat_clear_pure():
    app_global_para['pure_messages'] = []
    return None, None


def chat_clear_last():
    for index in app_global_para['last_turn_msg_id'][::-1]:
        del app_global_para['messages'][index]
    app_global_para['last_turn_msg_id'] = []


def pure_chat_clear_last():
    for index in app_global_para['pure_last_turn_msg_id'][::-1]:
        del app_global_para['pure_messages'][index]
    app_global_para['pure_last_turn_msg_id'] = []
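# Handle uploads from the Chat tab: Code Interpreter files are passed to the
# agent directly on the next turn, while Document QA files are parsed and
# cached via Memory and recorded in meta_data.jsonl.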
def add_file(file, chosen_plug):
    display_path = get_basename_from_url(file.name)
    if chosen_plug == CI_OPTION:
        app_global_para['uploaded_ci_file'] = file.name
        app_global_para['is_first_upload'] = True
        return display_path

    f_type = get_file_type(file)
    if f_type not in PARSER_SUPPORTED_FILE_TYPES:
        display_path = (
            f'Upload failed: only adding {", ".join(PARSER_SUPPORTED_FILE_TYPES)} as references is supported!')
    else:
        # cache file
        try:
            mem = Memory()
            *_, last = mem.run([{'role': 'user', 'content': [{'file': file.name}]}])
            title = display_path
            save_browsing_meta_data(file.name, title, meta_file)
        except Exception as ex:
            raise ValueError(ex)

    return display_path


def update_app_global_para(date1, date2):
    app_global_para['time'][0] = date1
    app_global_para['time'][1] = date2


def refresh_date():
    option = [str(datetime.date.today() - datetime.timedelta(days=i)) for i in range(server_config.server.max_days)]
    return (gr.update(choices=option,
                      value=str(datetime.date.today())), gr.update(choices=option, value=str(datetime.date.today())))
def update_browser_list():
    br_list = read_meta_data_by_condition(meta_file, time_limit=app_global_para['time'])
    if not br_list:
        return 'No browsing records'
    br_list = [[line['url'], line['title'], line['checked']] for line in br_list]
    res = '<ol>{bl}</ol>'
    bl = ''
    for i, x in enumerate(br_list):
        ck = '<input type="checkbox" class="custom-checkbox" id="ck-' + x[0] + '" '
        if x[2]:
            ck += 'checked>'
        else:
            ck += '>'
        bl += '<li>{checkbox}{title}<a href="{url}"> [url]</a></li>'.format(checkbox=ck, url=x[0], title=x[1])
    res = res.format(bl=bl)
    return res


def layout_to_right(text):
    return text, text


def download_text(text):
    now = datetime.datetime.now()
    current_time = now.strftime('%Y-%m-%d_%H-%M-%S')
    filename = f'file_{current_time}.md'
    save_path = os.path.join(server_config.path.download_root, filename)
    try:
        save_text_to_file(save_path, text)
        gr.Info(f'Saved to {save_path}')
    except Exception as ex:
        gr.Error(f'Failed to save this file.\n {str(ex)}')


def choose_plugin(chosen_plugin):
    if chosen_plugin == CI_OPTION:
        gr.Info('Code execution is NOT sandboxed. Do NOT ask Qwen to perform dangerous tasks.')
    if chosen_plugin == CI_OPTION or chosen_plugin == DOC_OPTION:
        return gr.update(interactive=True), None
    else:
        return gr.update(interactive=False), None
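# Pure Chat handler: streams a plain LLM conversation with no retrieval, and
# records the turn in app_global_para['pure_messages'] only when it succeeds.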
def pure_bot(history):
    if not history:
        yield history
    else:
        history[-1][1] = ''
        message = [{'role': 'user', 'content': history[-1][0].text, 'name': 'pure_chat_user'}]
        try:
            llm = get_chat_model(llm_config)
            response = llm.chat(messages=app_global_para['pure_messages'] + message)
            rsp = []
            for rsp in response:
                if rsp:
                    history[-1][1] = rsp[-1]['content']
                    yield history

            # Record the conversation history when the conversation succeeds
            app_global_para['pure_last_turn_msg_id'].append(len(app_global_para['pure_messages']))
            app_global_para['pure_messages'].extend(message)  # New user message

            app_global_para['pure_last_turn_msg_id'].append(len(app_global_para['pure_messages']))
            app_global_para['pure_messages'].extend(rsp)  # The response
        except ModelServiceError as ex:
            history[-1][1] = str(ex)
            yield history
        except Exception as ex:
            raise ValueError(ex)


def keep_only_files_for_name(messages, name):
    new_messages = []
    for message in messages:
        if message['role'] == 'user' and ('name' not in message or message['name'] != name):
            # rm files
            if isinstance(message['content'], list):
                new_content = []
                for item in message['content']:
                    for k, v in item.items():
                        if k != 'file':  # rm files
                            new_content.append(item)
                new_messages.append({'role': message['role'], 'content': new_content})
            else:
                new_messages.append(message)
        else:
            new_messages.append(message)
    return new_messages
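# Chat handler: with Code Interpreter selected it runs a ReActChat agent on the
# uploaded file; otherwise it runs a retrieval-augmented Assistant over the
# checked browsing records.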
def bot(history, chosen_plug):
    if not history:
        yield history
    else:
        history[-1][1] = ''
        if chosen_plug == CI_OPTION:  # use code interpreter
            if app_global_para['uploaded_ci_file'] and app_global_para['is_first_upload']:
                app_global_para['is_first_upload'] = False  # only send file when first upload
                message = [{
                    'role': 'user',
                    'content': [{
                        'text': history[-1][0].text
                    }, {
                        'file': app_global_para['uploaded_ci_file']
                    }],
                    'name': 'ci'
                }]
            else:
                message = [{'role': 'user', 'content': history[-1][0].text, 'name': 'ci'}]
            messages = keep_only_files_for_name(app_global_para['messages'], 'ci') + message
            func_assistant = ReActChat(function_list=['code_interpreter'], llm=llm_config)
            try:
                response = func_assistant.run(messages=messages)
                rsp = []
                for rsp in response:
                    if rsp:
                        history[-1][1] = rsp[-1]['content']
                        yield history
                # append message
                app_global_para['last_turn_msg_id'].append(len(app_global_para['messages']))
                app_global_para['messages'].extend(message)

                app_global_para['last_turn_msg_id'].append(len(app_global_para['messages']))
                app_global_para['messages'].extend(rsp)
            except ModelServiceError as ex:
                history[-1][1] = str(ex)
                yield history
            except Exception as ex:
                raise ValueError(ex)
        else:
            try:
                content = [{'text': history[-1][0].text}]
                # checked files
                for record in read_meta_data_by_condition(meta_file, time_limit=app_global_para['time'], checked=True):
                    content.append({'file': record['url']})

                qa_assistant = Assistant(llm=llm_config)
                message = [{'role': 'user', 'content': content}]
                # rm all files of history
                messages = keep_only_files_for_name(app_global_para['messages'], 'None') + message
                response = qa_assistant.run(messages=messages, max_ref_token=server_config.server.max_ref_token)
                rsp = []
                for rsp in response:
                    if rsp:
                        history[-1][1] = rsp[-1]['content']
                        yield history
                # append message
                app_global_para['last_turn_msg_id'].append(len(app_global_para['messages']))
                app_global_para['messages'].extend(message)

                app_global_para['last_turn_msg_id'].append(len(app_global_para['messages']))
                app_global_para['messages'].extend(rsp)
            except ModelServiceError as ex:
                history[-1][1] = str(ex)
                yield history
            except Exception as ex:
                raise ValueError(ex)


def get_last_one_line_context(text):
    lines = text.split('\n')
    n = len(lines)
    res = ''
    for i in range(n - 1, -1, -1):
        if lines[i].strip():
            res = lines[i]
            break
    return res
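# Editor "Continue" handler: routes on the last non-empty line of the editor
# text (/code -> code interpreter, /plug -> code interpreter + image_gen,
# otherwise continue writing; /title switches ArticleAgent to full-article
# planning).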
def generate(context):
    sp_query = get_last_one_line_context(context)
    if CODE_FLAG in sp_query:  # router to code interpreter
        sp_query = sp_query.split(CODE_FLAG)[-1]
        if has_chinese_chars(sp_query):
            sp_query += ', 必须使用code_interpreter工具'
        else:
            sp_query += ' (Please use code_interpreter.)'

        func_assistant = ReActChat(function_list=['code_interpreter'], llm=llm_config)
        try:
            response = func_assistant.run(messages=[{'role': 'user', 'content': sp_query}])
            for rsp in response:
                if rsp:
                    yield rsp[-1]['content']
        except ModelServiceError as ex:
            yield str(ex)
        except Exception as ex:
            raise ValueError(ex)
    elif PLUGIN_FLAG in sp_query:  # router to plugin
        sp_query = sp_query.split(PLUGIN_FLAG)[-1]
        func_assistant = ReActChat(function_list=['code_interpreter', 'image_gen'], llm=llm_config)
        try:
            response = func_assistant.run(messages=[{'role': 'user', 'content': sp_query}])
            for rsp in response:
                if rsp:
                    yield rsp[-1]['content']
        except ModelServiceError as ex:
            yield str(ex)
        except Exception as ex:
            raise ValueError(ex)
    else:  # router to continue writing
        sp_query_no_title = context
        if TITLE_FLAG in sp_query:  # /title
            sp_query_no_title = sp_query.split(TITLE_FLAG)[-1]

        full_article = False
        if TITLE_FLAG in sp_query:  # /title
            full_article = True

        try:
            writing_assistant = ArticleAgent(llm=llm_config)
            content = [{'text': sp_query_no_title}]
            # checked files
            for record in read_meta_data_by_condition(meta_file, time_limit=app_global_para['time'], checked=True):
                content.append({'file': record['url']})

            response = writing_assistant.run(messages=[{
                'role': 'user',
                'content': content
            }],
                                             max_ref_token=server_config.server.max_ref_token,
                                             full_article=full_article)
            for rsp in response:
                if rsp:
                    yield '\n'.join([x['content'] for x in rsp])
        except ModelServiceError as ex:
            yield str(ex)
        except Exception as ex:
            raise ValueError(ex)


def format_generate(edit, context):
    res = edit
    yield res
    if '> Writing Text:' in context:
        text = context.split('> Writing Text:')[-1].strip()
        res += '\n'
        res += text
        yield res
    elif 'Answer:' in context:
        response = output_beautify.format_answer(context)
        res += '\n'
        res += response
        yield res
    else:
        res += context
        yield res
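# Gradio UI: browsing-history date picker and record list on top, followed by
# the Editor, Chat and Pure Chat tabs.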
with gr.Blocks(css=css, js=js, theme='soft') as demo:
    title = gr.Markdown('Qwen Agent: BrowserQwen', elem_classes='title')
    desc = gr.Markdown(
        'This is the editing workstation of BrowserQwen, where Qwen has collected the browsing history. Qwen can assist you in completing your creative work!',
        elem_classes='desc',
    )

    with gr.Row():
        with gr.Column():
            rec = gr.Markdown('Browsing History', elem_classes='rec')
            with gr.Row():
                with gr.Column(scale=3, min_width=0):
                    date1 = gr.Dropdown(
                        [
                            str(datetime.date.today() - datetime.timedelta(days=i))
                            for i in range(server_config.server.max_days)
                        ],
                        value=str(datetime.date.today()),
                        label='Start Date',
                    )
                    date2 = gr.Dropdown(
                        [
                            str(datetime.date.today() - datetime.timedelta(days=i))
                            for i in range(server_config.server.max_days)
                        ],
                        value=str(datetime.date.today()),
                        label='End Date',
                    )
                with gr.Column(scale=7, min_width=0):
                    browser_list = gr.HTML(
                        value='',
                        label='browser_list',
                        elem_classes=['div_tmp', 'add_scrollbar'],
                    )

    with gr.Tab('Editor', elem_id='default-tab'):
        with gr.Row():
            with gr.Column():
                with gr.Row():
                    edit_area = gr.Textbox(
                        value='',
                        elem_classes=['textbox_default', 'add_scrollbar'],
                        lines=30,
                        label='Input',
                        show_copy_button=True,
                    )
                    # token_count = gr.HTML(value='<span>0</span>',
                    #                       elem_classes=[
                    #                           'token-counter',
                    #                           'default-token-counter'
                    #                       ])

                with gr.Row():
                    ctn_bt = gr.Button('Continue', variant='primary')
                    stop_bt = gr.Button('Stop')
                    clr_bt = gr.Button('Clear')
                    dld_bt = gr.Button('Download')

                # with gr.Row():
                #     layout_bt = gr.Button('👉', variant='primary')

            with gr.Column():
                cmd_area = gr.Textbox(lines=10, max_lines=10, label="Qwen's Inner Thought", elem_id='cmd')
                with gr.Tab('Markdown'):
                    # md_out_bt = gr.Button('Render')
                    md_out_area = gr.Markdown(elem_classes=['md_tmp', 'add_scrollbar'])

                with gr.Tab('HTML'):
                    html_out_area = gr.HTML()

                with gr.Tab('Raw'):
                    text_out_area = gr.Textbox(
                        lines=20,
                        label='',
                        elem_classes=['textbox_default_output', 'add_scrollbar'],
                        show_copy_button=True,
                    )

        clk_ctn_bt = ctn_bt.click(generate, edit_area, cmd_area)
        clk_ctn_bt.then(format_generate, [edit_area, cmd_area], edit_area)

        edit_area_change = edit_area.change(layout_to_right, edit_area, [text_out_area, md_out_area])
        stop_bt.click(lambda: None, cancels=[clk_ctn_bt], queue=False)
        clr_bt.click(
            lambda: [None, None, None],
            None,
            [edit_area, cmd_area, md_out_area],
            queue=False,
        )
        dld_bt.click(download_text, edit_area, None)

        # layout_bt.click(layout_to_right,
        #                 edit_area, [text_out_area, md_out_area],
        #                 queue=False)
gr.Markdown("""
### Usage Tips:
- Browsing History:
- Start Date/End Date: Selecting the browsed materials for the desired time period, including the start and end dates
- The browsed materials list: supporting the selection or removal of specific browsing content
- Editor: In the editing area, you can directly input content or special instructions, and then click the ```Continue``` button to have Qwen assist in completing the editing work:
- After inputting the content, directly click the ```Continue``` button: Qwen will begin to continue writing based on the browsing information
- Using special instructions:
- /title + content: Qwen enables the built-in planning process and writes a complete manuscript
- /code + content: Qwen enables the code interpreter plugin, writes and runs Python code, and generates replies
- /plug + content: Qwen enables plugin and select appropriate plugin to generate reply
- Chat: Interactive area. Qwen generates replies based on given reference materials. Selecting Code Interpreter will enable the code interpreter plugin
""")
    with gr.Tab('Chat', elem_id='chat-tab'):
        with gr.Column():
            chatbot = mgr.Chatbot(
                elem_id='chatbot',
                height=680,
                show_copy_button=True,
                avatar_images=[None, get_avatar_image('qwen')],
                flushing=False,
            )
            with gr.Row():
                with gr.Column(scale=1, min_width=0):
                    file_btn = gr.UploadButton('Upload', file_types=['file'])
                with gr.Column(scale=13):
                    chat_txt = gr.Textbox(
                        show_label=False,
                        placeholder='Chat with Qwen...',
                        container=False,
                    )
                with gr.Column(scale=1, min_width=0):
                    chat_clr_bt = gr.Button('Clear')
                with gr.Column(scale=1, min_width=0):
                    chat_stop_bt = gr.Button('Stop')
                with gr.Column(scale=1, min_width=0):
                    chat_re_bt = gr.Button('Again')
            with gr.Row():
                with gr.Column(scale=2, min_width=0):
                    plug_bt = gr.Dropdown(
                        [CI_OPTION, DOC_OPTION],
                        label='Plugin',
                        info='',
                        value=DOC_OPTION,
                    )
                with gr.Column(scale=8, min_width=0):
                    hidden_file_path = gr.Textbox(interactive=False, label='The uploaded file is displayed here')

        txt_msg = chat_txt.submit(add_text, [chatbot, chat_txt], [chatbot, chat_txt],
                                  queue=False).then(bot, [chatbot, plug_bt], chatbot)
        txt_msg.then(lambda: gr.update(interactive=True), None, [chat_txt], queue=False)

        re_txt_msg = (chat_re_bt.click(rm_text, [chatbot], [chatbot, chat_txt],
                                       queue=False).then(chat_clear_last, None,
                                                         None).then(bot, [chatbot, plug_bt], chatbot))
        re_txt_msg.then(lambda: gr.update(interactive=True), None, [chat_txt], queue=False)

        file_msg = file_btn.upload(add_file, [file_btn, plug_bt], [hidden_file_path], queue=False)
        file_msg.then(update_browser_list, None, browser_list)

        chat_clr_bt.click(chat_clear, None, [chatbot, hidden_file_path], queue=False)
        # re_bt.click(re_bot, chatbot, chatbot)
        chat_stop_bt.click(chat_clear_last, None, None, cancels=[txt_msg, re_txt_msg], queue=False)

        plug_bt.change(choose_plugin, plug_bt, [file_btn, hidden_file_path])
    with gr.Tab('Pure Chat', elem_id='pure-chat-tab'):
        gr.Markdown('Note: The chat box on this tab will not use any browsing history!')
        with gr.Column():
            pure_chatbot = mgr.Chatbot(
                elem_id='pure_chatbot',
                height=680,
                show_copy_button=True,
                avatar_images=[None, get_avatar_image('qwen')],
                flushing=False,
            )
            with gr.Row():
                with gr.Column(scale=13):
                    chat_txt = gr.Textbox(
                        show_label=False,
                        placeholder='Chat with Qwen...',
                        container=False,
                    )
                with gr.Column(scale=1, min_width=0):
                    chat_clr_bt = gr.Button('Clear')
                with gr.Column(scale=1, min_width=0):
                    chat_stop_bt = gr.Button('Stop')
                with gr.Column(scale=1, min_width=0):
                    chat_re_bt = gr.Button('Again')

        txt_msg = chat_txt.submit(pure_add_text, [pure_chatbot, chat_txt], [pure_chatbot, chat_txt],
                                  queue=False).then(pure_bot, pure_chatbot, pure_chatbot)
        txt_msg.then(lambda: gr.update(interactive=True), None, [chat_txt], queue=False)

        re_txt_msg = chat_re_bt.click(rm_text, [pure_chatbot], [pure_chatbot, chat_txt],
                                      queue=False).then(pure_chat_clear_last, None,
                                                        None).then(pure_bot, pure_chatbot, pure_chatbot)
        re_txt_msg.then(lambda: gr.update(interactive=True), None, [chat_txt], queue=False)

        chat_clr_bt.click(chat_clear_pure, None, pure_chatbot, queue=False)
        chat_stop_bt.click(pure_chat_clear_last, None, None, cancels=[txt_msg, re_txt_msg], queue=False)
    date1.change(update_app_global_para, [date1, date2],
                 None).then(update_browser_list, None, browser_list).then(chat_clear, None, [chatbot, hidden_file_path])
    date2.change(update_app_global_para, [date1, date2],
                 None).then(update_browser_list, None, browser_list).then(chat_clear, None, [chatbot, hidden_file_path])

    demo.load(update_app_global_para, [date1, date2],
              None).then(refresh_date, None,
                         [date1, date2]).then(update_browser_list, None,
                                              browser_list).then(chat_clear, None, [chatbot, hidden_file_path])
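# Serve the workstation UI on the host and port given in server_config.json.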
demo.queue().launch(server_name=server_config.server.server_host, server_port=server_config.server.workstation_port)