Skip to content

Commit

Permalink
Merge pull request langchain-ai#2 from benjaminliugang/MAX-32464-a
Browse files Browse the repository at this point in the history
MAX-32464 Add logging to diagnose the cost-not-reported issue on zinc-related instances
  • Loading branch information
shinxi authored Jan 22, 2024
2 parents 78e95c6 + b6b2943 commit 917e87e
Showing 1 changed file with 4 additions and 0 deletions.
4 changes: 4 additions & 0 deletions libs/community/langchain_community/callbacks/openai_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,12 +189,16 @@ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
completion_tokens = token_usage.get("completion_tokens", 0)
prompt_tokens = token_usage.get("prompt_tokens", 0)
model_name = standardize_model_name(response.llm_output.get("model_name", ""))
# TODO Remove the following test log
print(f"Report metrics cost issue check: {model_name}, {response.llm_output.get('model_name', '')}, {model_name in MODEL_COST_PER_1K_TOKENS}") # noqa: E501
if model_name in MODEL_COST_PER_1K_TOKENS:
completion_cost = get_openai_token_cost_for_model(
model_name, completion_tokens, is_completion=True
)
prompt_cost = get_openai_token_cost_for_model(model_name, prompt_tokens)
self.total_cost += prompt_cost + completion_cost
# TODO Remove the following test log
print(f"Report metrics cost issue check: completion_cost {completion_cost}, prompt_cost {prompt_cost}, total_cost {self.total_cost}") # noqa: E501
self.total_tokens += token_usage.get("total_tokens", 0)
self.prompt_tokens += prompt_tokens
self.completion_tokens += completion_tokens
Expand Down

0 comments on commit 917e87e

Please sign in to comment.