Skip to content

Commit

Permalink
Support streaming. Not enabled yet.
Browse files Browse the repository at this point in the history
  • Loading branch information
geekan committed Jul 5, 2023
1 parent 504de57 commit a69f07e
Show file tree
Hide file tree
Showing 2 changed files with 32 additions and 2 deletions.
5 changes: 4 additions & 1 deletion examples/llm_hello_world.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,14 @@ async def main():
logger.info(await llm.aask('hello world'))
logger.info(await llm.aask_batch(['hi', 'write python hello world.']))

hello_msg = [{'role': 'user', 'content': 'hello'}]
hello_msg = [{'role': 'user', 'content': 'count from 1 to 10. split by newline.'}]
logger.info(await llm.acompletion(hello_msg))
logger.info(await llm.acompletion_batch([hello_msg]))
logger.info(await llm.acompletion_batch_text([hello_msg]))

logger.info(await llm.acompletion_text(hello_msg))
await llm.acompletion_text(hello_msg, stream=True)


if __name__ == '__main__':
asyncio.run(main())
29 changes: 28 additions & 1 deletion metagpt/provider/openai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,31 @@ def __init_openai(self, config):
openai.api_version = config.openai_api_version
self.rpm = int(config.get("RPM", 10))

async def _achat_completion_stream(self, messages: list[dict]) -> str:
response = await openai.ChatCompletion.acreate(
model=self.model,
messages=messages,
max_tokens=self.config.max_tokens_rsp,
n=1,
stop=None,
temperature=0,
stream=True
)

# create variables to collect the stream of chunks
collected_chunks = []
collected_messages = []
# iterate through the stream of events
async for chunk in response:
collected_chunks.append(chunk) # save the event response
chunk_message = chunk['choices'][0]['delta'] # extract the message
collected_messages.append(chunk_message) # save the message
if "content" in chunk_message:
print(chunk_message["content"], end="")

full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
return full_reply_content

async def _achat_completion(self, messages: list[dict]) -> dict:
rsp = await self.llm.ChatCompletion.acreate(
model=self.model,
Expand Down Expand Up @@ -180,7 +205,9 @@ async def acompletion(self, messages: list[dict]) -> dict:
# messages = self.messages_to_dict(messages)
return await self._achat_completion(messages)

async def acompletion_text(self, messages: list[dict]) -> str:
async def acompletion_text(self, messages: list[dict], stream=False) -> str:
    """Return the assistant's reply for *messages* as plain text.

    When ``stream`` is true, delegate to the streaming variant (which echoes
    tokens to stdout as they arrive); otherwise perform a regular completion
    and extract the first choice's text.
    """
    if not stream:
        reply = await self._achat_completion(messages)
        return self.get_choice_text(reply)
    return await self._achat_completion_stream(messages)

Expand Down

0 comments on commit a69f07e

Please sign in to comment.