Skip to content

Commit

Permalink
Merge pull request geekan#103 from alitrack/main
Browse files Browse the repository at this point in the history
 options: update_costs & calc_usage
  • Loading branch information
geekan authored Aug 2, 2023
2 parents 522ba1f + 4027733 commit bdb1002
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 8 deletions.
6 changes: 5 additions & 1 deletion config/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -61,4 +61,8 @@ SD_T2I_API: "/sdapi/v1/txt2img"
#### for Mermaid CLI
## If you installed mmdc (Mermaid CLI) only for metagpt then enable the following configuration.
#PUPPETEER_CONFIG: "./config/puppeteer-config.json"
#MMDC: "./node_modules/.bin/mmdc"

### for update_costs & calc_usage
UPDATE_COSTS: false
CALC_USAGE: false
4 changes: 4 additions & 0 deletions metagpt/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,10 @@ def __init__(self, yaml_file=default_yaml_file):
self.total_cost = 0.0
self.puppeteer_config = self._get("PUPPETEER_CONFIG","")
self.mmdc = self._get("MMDC","mmdc")
self.update_costs = self._get("UPDATE_COSTS",True)
self.calc_usage = self._get("CALC_USAGE",True)



def _init_with_config_files_and_env(self, configs: dict, yaml_file):
"""从config/key.yaml / config/config.yaml / env三处按优先级递减加载"""
Expand Down
16 changes: 9 additions & 7 deletions metagpt/provider/openai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,10 +221,11 @@ async def acompletion_text(self, messages: list[dict], stream=False) -> str:

def _calc_usage(self, messages: list[dict], rsp: str) -> dict:
    """Estimate token usage for one chat completion.

    Args:
        messages: The prompt messages sent to the model.
        rsp: The completion text returned by the model.

    Returns:
        A dict with 'prompt_tokens' and 'completion_tokens' counts,
        or an empty dict when usage accounting is disabled via
        CONFIG.calc_usage (skips the relatively expensive tokenization).
    """
    usage = {}
    # Only pay the tokenization cost when usage tracking is enabled;
    # previously the counts were also computed unconditionally before
    # this guard, defeating the CONFIG.calc_usage switch.
    if CONFIG.calc_usage:
        usage['prompt_tokens'] = count_message_tokens(messages, self.model)
        usage['completion_tokens'] = count_string_tokens(rsp, self.model)
    return usage

async def acompletion_batch(self, batch: list[list[dict]]) -> list[dict]:
Expand Down Expand Up @@ -254,9 +255,10 @@ async def acompletion_batch_text(self, batch: list[list[dict]]) -> list[str]:
return results

def _update_costs(self, usage: dict):
    """Record token usage with the cost manager.

    Args:
        usage: Dict carrying 'prompt_tokens' and 'completion_tokens'
            (values may be str or int; coerced with int()).

    Honors the CONFIG.update_costs switch: when disabled, nothing is
    recorded. Previously the cost manager was also updated once
    unconditionally before this guard, double-counting every call and
    bypassing the flag.
    """
    if CONFIG.update_costs:
        prompt_tokens = int(usage['prompt_tokens'])
        completion_tokens = int(usage['completion_tokens'])
        self._cost_manager.update_cost(prompt_tokens, completion_tokens, self.model)

def get_costs(self) -> Costs:
    """Return the accumulated usage/cost totals tracked so far.

    Thin delegate: all bookkeeping lives in the cost manager; this
    method just exposes its current snapshot.
    """
    manager = self._cost_manager
    return manager.get_costs()

0 comments on commit bdb1002

Please sign in to comment.