Commit

improve mock functionality
abi committed Mar 8, 2024
1 parent fa58f2c commit a0f5af0
Showing 1 changed file with 5 additions and 2 deletions.
backend/mock_llm.py: 7 changes (5 additions, 2 deletions)
@@ -4,6 +4,9 @@
 from custom_types import InputMode
 
 
+STREAM_CHUNK_SIZE = 100
+
+
 async def mock_completion(
     process_chunk: Callable[[str], Awaitable[None]], input_mode: InputMode
 ) -> str:
@@ -13,8 +16,8 @@ async def mock_completion(
         else NO_IMAGES_NYTIMES_MOCK_CODE
     )
 
-    for i in range(0, len(code_to_return), 100):
-        await process_chunk(code_to_return[i : i + 100])
+    for i in range(0, len(code_to_return), STREAM_CHUNK_SIZE):
+        await process_chunk(code_to_return[i : i + STREAM_CHUNK_SIZE])
         await asyncio.sleep(0.01)
 
     return code_to_return
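The change replaces the hard-coded chunk size of 100 with a module-level STREAM_CHUNK_SIZE constant, so the slicing step and the process_chunk call stay in sync if the size is tuned later. Below is a minimal, self-contained sketch of the same chunked-streaming pattern; stream_in_chunks, collect, and the 250-character payload are illustrative stand-ins, not code from the repository.

# Sketch of the chunked streaming pattern this commit factors behind
# STREAM_CHUNK_SIZE. Only the constant and the callback shape mirror the
# diff above; the helper and driver names are hypothetical.
import asyncio
from typing import Awaitable, Callable

STREAM_CHUNK_SIZE = 100  # characters handed to the callback per step

async def stream_in_chunks(
    text: str, process_chunk: Callable[[str], Awaitable[None]]
) -> str:
    # Slice the payload into fixed-size pieces and pass each to the callback,
    # pausing briefly so consumers see it arrive like a streamed response.
    for i in range(0, len(text), STREAM_CHUNK_SIZE):
        await process_chunk(text[i : i + STREAM_CHUNK_SIZE])
        await asyncio.sleep(0.01)
    return text

async def main() -> None:
    received: list[str] = []

    async def collect(chunk: str) -> None:
        received.append(chunk)

    full = await stream_in_chunks("x" * 250, collect)
    # 250 characters at a chunk size of 100 yield pieces of 100, 100, and 50.
    print([len(c) for c in received], len(full))

asyncio.run(main())

Running the sketch prints [100, 100, 50] 250, mirroring how mock_completion feeds the mock response to process_chunk in STREAM_CHUNK_SIZE-sized pieces with a short pause between chunks.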
