Skip to content

Commit

Permalink
feat(agent/core): Add max_output_tokens parameter to `create_chat_completion` interface
Browse files Browse the repository at this point in the history
  • Loading branch information
Pwuts committed Apr 22, 2024
1 parent 35ebb10 commit 7bb7c30
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -416,12 +416,17 @@ async def create_chat_completion(
model_name: OpenAIModelName,
completion_parser: Callable[[AssistantChatMessage], _T] = lambda _: None,
functions: Optional[list[CompletionModelFunction]] = None,
max_output_tokens: Optional[int] = None,
**kwargs,
) -> ChatModelResponse[_T]:
"""Create a completion using the OpenAI API."""

openai_messages, completion_kwargs = self._get_chat_completion_args(
model_prompt, model_name, functions, **kwargs
model_prompt=model_prompt,
model_name=model_name,
functions=functions,
max_tokens=max_output_tokens,
**kwargs,
)
tool_calls_compat_mode = bool(functions and "tools" not in completion_kwargs)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -357,6 +357,7 @@ async def create_chat_completion(
model_name: str,
completion_parser: Callable[[AssistantChatMessage], _T] = lambda _: None,
functions: Optional[list[CompletionModelFunction]] = None,
max_output_tokens: Optional[int] = None,
**kwargs,
) -> ChatModelResponse[_T]:
...
2 changes: 1 addition & 1 deletion autogpts/autogpt/autogpt/processing/text.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ async def _process_text(
model_prompt=prompt.messages,
model_name=model,
temperature=0.5,
max_tokens=max_result_tokens,
max_output_tokens=max_result_tokens,
completion_parser=lambda s: (
extract_list_from_json(s.content) if output_type is not str else None
),
Expand Down

0 comments on commit 7bb7c30

Please sign in to comment.