forked from phoenix/litellm-mirror
fix(utils.py): support time based pricing for openai-compatible together ai
parent efb6123d28
commit 25a0e15727
1 changed file with 5 additions and 1 deletion
@@ -3455,7 +3455,11 @@ def completion_cost(
         else:
             raise Exception(f"Model={model} not found in completion cost model map")
         # Calculate cost based on prompt_tokens, completion_tokens
-        if "togethercomputer" in model or "together_ai" in model:
+        if (
+            "togethercomputer" in model
+            or "together_ai" in model
+            or custom_llm_provider == "together_ai"
+        ):
             # together ai prices based on size of llm
             # get_model_params_and_category takes a model name and returns the category of LLM size it is in model_prices_and_context_window.json
             model = get_model_params_and_category(model)
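The sketch below is a minimal standalone illustration of the condition this commit adds inside completion_cost. The helper name and the example model strings are hypothetical, not part of the litellm codebase; they only show why the extra custom_llm_provider check matters for OpenAI-compatible Together AI calls whose model name carries no "together" hint.

# Illustrative sketch of the routing condition introduced by this commit.
# uses_together_size_pricing and the model strings below are made up for the example.
from typing import Optional


def uses_together_size_pricing(model: str, custom_llm_provider: Optional[str]) -> bool:
    """Return True when a request should be priced by Together AI model size."""
    return (
        "togethercomputer" in model
        or "together_ai" in model
        # New check: catches OpenAI-compatible Together AI calls whose model
        # string carries no "together" substring, only the provider flag.
        or custom_llm_provider == "together_ai"
    )


# Before this commit the first call would not match the Together AI branch,
# because neither substring appears in the model name; with the provider
# check it now does.
assert uses_together_size_pricing("mistralai/Mixtral-8x7B-Instruct-v0.1", "together_ai")
assert uses_together_size_pricing("together_ai/togethercomputer/llama-2-70b", None)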