forked from phoenix/litellm-mirror
fix(caching.py): dump model response object as json
parent b8c3896323
commit 1665b872c3
4 changed files with 3 additions and 2 deletions
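The mechanism behind the fix is Pydantic's model_dump_json(), which the diff below calls on litellm.ModelResponse. As a quick illustration of what that buys the cache layer, here is a minimal round trip with a stand-in model (the FakeModelResponse class and its fields are assumptions for illustration, not litellm's schema):

```python
import json

from pydantic import BaseModel


class FakeModelResponse(BaseModel):
    # Stand-in for litellm.ModelResponse, which is a Pydantic model.
    # These fields are illustrative, not litellm's actual schema.
    id: str
    model: str
    choices: list


response = FakeModelResponse(id="chatcmpl-123", model="gpt-3.5-turbo", choices=[])

# model_dump_json() (Pydantic v2) renders the object as a JSON string,
# which string-oriented cache backends such as Redis can store as-is.
serialized = response.model_dump_json()
restored = json.loads(serialized)
print(restored["model"])  # -> gpt-3.5-turbo
```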
BIN dist/litellm-1.0.0.dev1-py3-none-any.whl (vendored, normal file; binary file not shown)
BIN dist/litellm-1.0.0.dev1.tar.gz (vendored, normal file; binary file not shown)
@@ -11,7 +11,6 @@ import litellm
 import time, logging
 import json, traceback


 def get_prompt(*args, **kwargs):
     # make this safe checks, it should not throw any exceptions
     if len(args) > 1:
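This hunk from caching.py only shows the first line of get_prompt's body. A hedged sketch of how such a helper plausibly continues, given its comment that it must never raise (everything past `if len(args) > 1:` below is an assumption, not litellm's code):

```python
def get_prompt(*args, **kwargs):
    # make this safe checks, it should not throw any exceptions
    try:
        if len(args) > 1:  # positional call: completion(model, messages, ...)
            messages = args[1]
            return " ".join(m.get("content", "") for m in messages)
        if "messages" in kwargs:  # keyword chat-completion call
            return " ".join(m.get("content", "") for m in kwargs["messages"])
        if "prompt" in kwargs:  # text-completion style call
            return kwargs["prompt"]
    except Exception:
        pass
    return None
```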
@@ -222,6 +221,8 @@ class Cache:
             else:
                 cache_key = self.get_cache_key(*args, **kwargs)
                 if cache_key is not None:
+                    if isinstance(result, litellm.ModelResponse):
+                        result = result.model_dump_json()
                     self.cache.set_cache(cache_key, result, **kwargs)
         except:
             pass
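Read as a whole, the patched add_cache path in caching.py looks roughly like the sketch below, reconstructed from this hunk's context lines; the body of the `"cache_key" in kwargs` branch is an assumption, since the hunk does not show it:

```python
import litellm


class Cache:
    # Only add_cache is sketched here; get_cache_key and self.cache come
    # from the rest of the class, which this hunk does not show.
    def add_cache(self, result, *args, **kwargs):
        try:
            if "cache_key" in kwargs:
                # assumption: this branch reuses a caller-supplied key;
                # its body is outside the hunk
                self.cache.set_cache(kwargs["cache_key"], result, **kwargs)
            else:
                cache_key = self.get_cache_key(*args, **kwargs)
                if cache_key is not None:
                    # new in this commit: ModelResponse is a Pydantic model,
                    # so dump it to a JSON string before storing it
                    if isinstance(result, litellm.ModelResponse):
                        result = result.model_dump_json()
                    self.cache.set_cache(cache_key, result, **kwargs)
        except:
            # caching is best-effort; never let it break the completion call
            pass
```

The matching read path would have to json-load the stored string back into a response object; that side is outside this diff.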
@@ -31,7 +31,7 @@ def test_completion_openai_prompt():
         #print(response.choices[0].text)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-# test_completion_openai_prompt()
+test_completion_openai_prompt()


 def test_completion_chatgpt_prompt():
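This last hunk uncomments the module-level call so the test runs on import. For context, the enabled test plausibly reads like the sketch below, assuming litellm's text_completion API; only the lines that appear in the hunk above are taken from the diff, and the model and prompt arguments are illustrative:

```python
import pytest

from litellm import text_completion


def test_completion_openai_prompt():
    try:
        # illustrative call; the request arguments are outside this hunk
        response = text_completion(
            model="gpt-3.5-turbo-instruct", prompt="What's the weather in SF?"
        )
        print(response)
        #print(response.choices[0].text)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")


# the commit flips this module-level call from commented-out to live
test_completion_openai_prompt()
```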