Skip to content

Commit

Permalink
Reduce max output tokens
Browse files Browse the repository at this point in the history
  • Loading branch information
tom-doerr committed Aug 31, 2021
1 parent 137fbf9 commit fff8fa7
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion python/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
def complete_input_max_length(input_prompt, max_input_length=MAX_SUPPORTED_INPUT_LENGTH):
    """Truncate the prompt and request a completion from the Codex engine.

    Only the trailing ``max_input_length`` characters of ``input_prompt``
    are sent, keeping the request within the model's supported input size.

    Parameters:
        input_prompt: Text to complete; only its tail is sent to the API.
        max_input_length: Maximum number of trailing characters to keep.
            Defaults to the module-level ``MAX_SUPPORTED_INPUT_LENGTH``.

    Returns:
        The OpenAI Completion response. NOTE(review): presumably a stream
        iterator when ``USE_STREAM_FEATURE`` is truthy, otherwise a single
        response object — confirm against the openai client version in use.
    """
    # Keep only the tail of the prompt; earlier text is dropped.
    input_prompt = input_prompt[-max_input_length:]
    # max_tokens=64 caps the length of the generated completion
    # (this commit reduced it from 128).
    response = openai.Completion.create(
        engine='davinci-codex',
        prompt=input_prompt,
        best_of=1,
        temperature=0.5,
        max_tokens=64,
        stream=USE_STREAM_FEATURE,
    )
    return response

def complete_input(input_prompt):
Expand Down

0 comments on commit fff8fa7

Please sign in to comment.