Skip to content

Commit

Permalink
run through pipeline (InternLM#4)
Browse files Browse the repository at this point in the history
Co-authored-by: liujiangning <[email protected]>
  • Loading branch information
Harold-lkk and liujiangning30 authored Jul 29, 2024
1 parent 9f57c63 commit 5bc152e
Show file tree
Hide file tree
Showing 4 changed files with 59 additions and 33 deletions.
6 changes: 2 additions & 4 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@

from src.agent import init_agent

# agent = os.environ.get('agent_cfg', dict())

app = FastAPI(docs_url='/')

app.add_middleware(CORSMiddleware,
Expand Down Expand Up @@ -95,10 +93,10 @@ async def async_generator_wrapper():
# yield f'data: {response_json}\n\n'

inputs = request.inputs
agent = init_agent(request.agent_cfg)
agent = init_agent(**request.agent_cfg)
return EventSourceResponse(generate())


if __name__ == '__main__':
import uvicorn
uvicorn.run(app, host='0.0.0.0', port=8090, log_level='info')
uvicorn.run(app, host='0.0.0.0', port=8002, log_level='info')
54 changes: 27 additions & 27 deletions src/agent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,48 +2,48 @@
from datetime import datetime

from lagent.actions import ActionExecutor, BingBrowser
from lagent.llms import LMDeployPipeline

from .mindsearch_agent import MindSearchAgent, MindSearchProtocol
from .mindsearch_prompt import (FINAL_RESPONSE_CN, GRAPH_PROMPT_CN,
searcher_context_template_cn,
searcher_input_template_cn,
searcher_system_prompt_cn)
import src.agent.models as llm_factory
from src.agent.mindsearch_agent import MindSearchAgent, MindSearchProtocol
from src.agent.mindsearch_prompt import (
FINAL_RESPONSE_CN, FINAL_RESPONSE_EN, GRAPH_PROMPT_CN, GRAPH_PROMPT_EN,
searcher_context_template_cn, searcher_context_template_en,
searcher_input_template_cn, searcher_input_template_en,
searcher_system_prompt_cn, searcher_system_prompt_en)

llm = LMDeployPipeline(path='internlm/internlm2_5-7b',
model_name='internlm2',
meta_template=[
dict(role='system', api_role='system'),
dict(role='user', api_role='user'),
dict(role='assistant', api_role='assistant'),
dict(role='environment', api_role='environment')
],
top_p=0.8,
top_k=1,
temperature=0,
max_new_tokens=8192,
repetition_penalty=1.02,
stop_words=['<|im_end|>'])
LLM = {}


def init_agent(lang='cn', model_format='pipeline'):
def init_agent(lang='cn', model_format='internlm_server'):
llm = LLM.get(model_format, None)
if llm is None:
llm_cfg = getattr(llm_factory, model_format)
if llm_cfg is None:
raise NotImplementedError
llm = llm_cfg.pop('type')(**llm_cfg)
LLM[model_format] = llm

agent = MindSearchAgent(
llm=llm,
protocol=MindSearchProtocol(meta_prompt=datetime.now().strftime(
'The current date is %Y-%m-%d.'),
interpreter_prompt=GRAPH_PROMPT_CN,
response_prompt=FINAL_RESPONSE_CN),
interpreter_prompt=GRAPH_PROMPT_CN
if lang == 'cn' else GRAPH_PROMPT_EN,
response_prompt=FINAL_RESPONSE_CN
if lang == 'cn' else FINAL_RESPONSE_EN),
searcher_cfg=dict(
llm=llm,
plugin=ActionExecutor(
plugin_executor=ActionExecutor(
BingBrowser(
api_key=os.environ.get('BING_API_KEY', 'YOUR_BING_API'))),
protocol=MindSearchProtocol(
meta_prompt=datetime.now().strftime(
'The current date is %Y-%m-%d.'),
plugin_prompt=searcher_system_prompt_cn,
plugin_prompt=searcher_system_prompt_cn
if lang == 'cn' else searcher_system_prompt_en,
),
template=dict(input=searcher_input_template_cn,
context=searcher_context_template_cn)))
template=dict(input=searcher_input_template_cn
if lang == 'cn' else searcher_input_template_en,
context=searcher_context_template_cn
if lang == 'cn' else searcher_context_template_en)))
return agent
3 changes: 1 addition & 2 deletions src/agent/mindsearch_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,8 +322,7 @@ def _generate_reference(self, agent_return, code, as_dict):
def execute_code(self, command: str):

def extract_code(text: str) -> str:
text = re.sub(r'from ([\w.]+) import',
'from lagent.agents.mindsearch_agent import', text)
text = re.sub(r'from ([\w.]+) import WebSearchGraph', '', text)
triple_match = re.search(r'```[^\n]*\n(.+?)```', text, re.DOTALL)
single_match = re.search(r'`([^`]*)`', text, re.DOTALL)
if triple_match:
Expand Down
29 changes: 29 additions & 0 deletions src/agent/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import os

from lagent.llms import GPTAPI, INTERNLM2_META, LMDeployClient, LMDeployServer

# Backend config: serve internlm2_5-7b via a locally spawned LMDeploy
# api_server (lagent's LMDeployServer). The dict is lazily instantiated by
# init_agent, which pops 'type' and passes the rest as constructor kwargs.
# NOTE(review): top_k=1 with temperature=0 pins greedy decoding, which makes
# top_p=0.8 effectively inert — presumably kept for parity with the other
# configs; confirm intent.
internlm_server = dict(type=LMDeployServer,
path='internlm/internlm2_5-7b',
model_name='internlm2',
meta_template=INTERNLM2_META,
top_p=0.8,
top_k=1,
temperature=0,
max_new_tokens=8192,
repetition_penalty=1.02,
stop_words=['<|im_end|>'])

# Backend config: connect to an already-running LMDeploy server over HTTP
# (lagent's LMDeployClient) at the default local endpoint on port 23333.
# Sampling parameters mirror internlm_server so either backend behaves the
# same. NOTE(review): model_name here is 'internlm2_5-7b' while the server
# config uses 'internlm2' — presumably both resolve to the same chat
# template; verify against lagent's model registry.
internlm_client = dict(type=LMDeployClient,
model_name='internlm2_5-7b',
url='http://127.0.0.1:23333',
meta_template=INTERNLM2_META,
top_p=0.8,
top_k=1,
temperature=0,
max_new_tokens=8192,
repetition_penalty=1.02,
stop_words=['<|im_end|>'])

# Backend config: OpenAI GPT-4 Turbo via lagent's GPTAPI wrapper. The API
# key is read from the OPENAI_API_KEY environment variable; the fallback
# string is a visible placeholder, so requests fail loudly if it is unset.
gpt4 = dict(type=GPTAPI,
model_type='gpt-4-turbo',
key=os.environ.get('OPENAI_API_KEY', 'YOUR OPENAI API KEY'))

0 comments on commit 5bc152e

Please sign in to comment.