fix(together_ai.py): additional logging for together ai encoding prompt

Krrish Dholakia 2023-12-15 10:39:23 -08:00
parent cab870f73a
commit a09a6f24a4


@@ -177,6 +177,7 @@ def completion(
             model_response["choices"][0]["message"]["content"] = completion_response["output"]["choices"][0]["text"]
         ## CALCULATING USAGE
+        print_verbose(f"CALCULATING TOGETHERAI TOKEN USAGE. Model Response: {model_response}; model_response['choices'][0]['message'].get('content', ''): {model_response['choices'][0]['message'].get('content', None)}")
         prompt_tokens = len(encoding.encode(prompt))
         completion_tokens = len(
             encoding.encode(model_response["choices"][0]["message"].get("content", ""))
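
For context, the added print_verbose call logs the raw response object just before prompt and completion tokens are counted with the model's tokenizer. Below is a minimal, self-contained sketch of that usage calculation; the cl100k_base tiktoken encoding, the print-based print_verbose stand-in, and the mocked model_response dict are assumptions for illustration, not litellm's actual objects.

import tiktoken

# Assumption: tiktoken's cl100k_base encoding stands in for the `encoding`
# object that litellm passes into completion().
encoding = tiktoken.get_encoding("cl100k_base")

def print_verbose(message: str) -> None:
    # Stand-in for litellm's verbose logger (hypothetical helper for this sketch).
    print(message)

prompt = "Write a haiku about the sea."
# Mocked response shaped like the dict the diff operates on.
model_response = {"choices": [{"message": {"content": "Waves fold into foam under a grey sky."}}]}

## CALCULATING USAGE
print_verbose(
    f"CALCULATING TOGETHERAI TOKEN USAGE. Model Response: {model_response}; "
    f"content: {model_response['choices'][0]['message'].get('content', None)}"
)
prompt_tokens = len(encoding.encode(prompt))
completion_tokens = len(
    encoding.encode(model_response["choices"][0]["message"].get("content", ""))
)
print_verbose(f"prompt_tokens={prompt_tokens}, completion_tokens={completion_tokens}")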