fix(caching.py): dump model response object as json

commit 1665b872c3 (parent b8c3896323)
Author: Krrish Dholakia
Date:   2023-11-13 10:40:43 -08:00
4 changed files with 3 additions and 2 deletions

dist/litellm-1.0.0.dev1-py3-none-any.whl (vendored binary file, contents not shown)
dist/litellm-1.0.0.dev1.tar.gz (vendored binary file, contents not shown)

caching.py

@@ -11,7 +11,6 @@ import litellm
 import time, logging
 import json, traceback
 def get_prompt(*args, **kwargs):
     # make these checks safe; this function should not throw any exceptions
     if len(args) > 1:
@@ -222,6 +221,8 @@ class Cache:
             else:
                 cache_key = self.get_cache_key(*args, **kwargs)
             if cache_key is not None:
+                if isinstance(result, litellm.ModelResponse):
+                    result = result.model_dump_json()
                 self.cache.set_cache(cache_key, result, **kwargs)
         except:
             pass
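
For context on the change above: the patched Cache method now serializes a litellm.ModelResponse to a JSON string via model_dump_json() (pydantic v2's JSON dump method) before handing it to the cache backend, since backends such as Redis store str/bytes values rather than arbitrary Python objects. Below is a minimal, self-contained sketch of that serialize-before-cache pattern under stated assumptions: SimpleResponse and InMemoryCache are hypothetical stand-ins for litellm's actual response object and cache backends, and the read path shown is illustrative, not litellm's implementation.

import json
from pydantic import BaseModel


class SimpleResponse(BaseModel):
    # Hypothetical stand-in for litellm.ModelResponse (assumed pydantic-based,
    # since the diff calls model_dump_json()).
    id: str = "chatcmpl-123"
    model: str = "gpt-3.5-turbo"
    content: str = "Hello!"


class InMemoryCache:
    # Toy cache backend; real backends (e.g. Redis) expect str/bytes values.
    def __init__(self):
        self._store = {}

    def set_cache(self, key, value, **kwargs):
        self._store[key] = value

    def get_cache(self, key, **kwargs):
        return self._store.get(key)


def add_to_cache(cache, cache_key, result):
    # Mirrors the patched logic: dump the response object to a JSON string
    # before writing it to the cache.
    if isinstance(result, BaseModel):
        result = result.model_dump_json()
    cache.set_cache(cache_key, result)


def read_from_cache(cache, cache_key):
    # The cached value comes back as a JSON string, so the reader has to
    # re-hydrate it into a response object.
    raw = cache.get_cache(cache_key)
    return SimpleResponse(**json.loads(raw)) if raw is not None else None


cache = InMemoryCache()
add_to_cache(cache, "prompt-key", SimpleResponse())
print(read_from_cache(cache, "prompt-key"))

One consequence of this change is that whatever reads from the cache now receives a JSON string rather than a ModelResponse, so the corresponding get path has to parse it back, as sketched above.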


@@ -31,7 +31,7 @@ def test_completion_openai_prompt():
         #print(response.choices[0].text)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-# test_completion_openai_prompt()
+test_completion_openai_prompt()
 def test_completion_chatgpt_prompt():
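
The test-file hunk only re-enables the module-level call to test_completion_openai_prompt(), so the test now also runs when the file is executed directly with python rather than only when collected by pytest. A hedged sketch of that pattern follows; the test body here (model, prompt, and the use of litellm.completion) is an illustrative assumption, not the exact contents of the repo's test file, which appears to exercise a text-completion-style response (response.choices[0].text).

import pytest
import litellm


def test_completion_openai_prompt():
    # Illustrative test body; the real test's model/prompt may differ.
    try:
        response = litellm.completion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Say hello."}],
        )
        assert response is not None
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")


# Uncommented module-level call: running the test file with plain python now
# executes the test directly, in addition to pytest discovering it by name.
test_completion_openai_prompt()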