test(test_streaming.py): set cache to none

Krrish Dholakia 2023-11-10 15:24:10 -08:00
parent 2c32f4a588
commit 697497cdfa
2 changed files with 4 additions and 1 deletion
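Why each test resets the cache: litellm consults the module-level litellm.cache object on every completion() call, so a cache enabled by one test can leak cached responses into later tests and mask real API behavior. A minimal sketch (not part of this commit; the fixture name is hypothetical) of enforcing the same isolation once with an autouse pytest fixture instead of per-test assignments:

import pytest
import litellm

@pytest.fixture(autouse=True)
def _no_litellm_cache():
    # run every test with caching disabled
    litellm.cache = None
    yield
    # reset afterwards in case the test enabled a cache
    litellm.cache = None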

test_completion.py

@@ -14,7 +14,7 @@ import litellm
 from litellm import embedding, completion, completion_cost
 from litellm import RateLimitError
 litellm.num_retries = 3
+litellm.cache = None
 user_message = "Write a short poem about the sky"
 messages = [{"content": user_message, "role": "user"}]
@@ -24,6 +24,7 @@ def logger_fn(user_model_dict):
 def test_completion_custom_provider_model_name():
     try:
+        litellm.cache = None
         response = completion(
             model="together_ai/togethercomputer/llama-2-70b-chat",
             messages=messages,
@@ -41,6 +42,7 @@ def test_completion_custom_provider_model_name():
 def test_completion_claude():
     litellm.set_verbose = False
+    litellm.cache = None
     litellm.AnthropicConfig(max_tokens_to_sample=200, metadata={"user_id": "1224"})
     try:
         # test without max tokens

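The hunk above also sits next to the AnthropicConfig call. As a rough sketch of the mechanics (based on litellm's provider-config pattern; treat the behavior described in the comments as an assumption, not something this diff states), instantiating the config class stores defaults that later Anthropic completions pick up:

import litellm

# Setting provider-level defaults once; subsequent Anthropic calls
# inherit max_tokens_to_sample and metadata without passing them per call.
litellm.AnthropicConfig(max_tokens_to_sample=200, metadata={"user_id": "1224"})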
test_streaming.py

@@ -227,6 +227,7 @@ def streaming_format_tests(idx, chunk):
 def test_completion_cohere_stream_bad_key():
     try:
+        litellm.cache = None
         api_key = "bad-key"
         messages = [
             {"role": "system", "content": "You are a helpful assistant."},