Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
test(test_streaming.py): set cache to none
This commit is contained in: commit 697497cdfa (parent 2c32f4a588)
2 changed files with 4 additions and 1 deletion
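The change is a test-isolation fix: when litellm.cache is set, completion() can serve a response from the cache, so a cache configured by one test can leak into others. The commit resets the module-level cache to None at import time and at the start of three individual tests; the last hunk is in test_streaming.py (named in the commit title), and the first three hunks are in the other changed test module. A minimal sketch of the pattern follows; the Cache import path and the first test are assumptions for illustration, not part of this diff.

# Sketch only: illustrates why these tests reset litellm.cache.
# The Cache import path and test_that_enables_caching are assumptions,
# not taken from the diff.
import litellm
from litellm.caching import Cache  # assumed import path for this litellm version


def test_that_enables_caching():
    # A test elsewhere in the suite turns on litellm's response cache.
    litellm.cache = Cache()


def test_completion_claude():
    # Without this reset (the line the commit adds), completion() could
    # return a cached response instead of hitting the provider.
    litellm.cache = None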
@@ -14,7 +14,7 @@ import litellm
 from litellm import embedding, completion, completion_cost
 from litellm import RateLimitError
 litellm.num_retries = 3
-
+litellm.cache = None
 user_message = "Write a short poem about the sky"
 messages = [{"content": user_message, "role": "user"}]
@@ -24,6 +24,7 @@ def logger_fn(user_model_dict):
 
 def test_completion_custom_provider_model_name():
     try:
+        litellm.cache = None
         response = completion(
             model="together_ai/togethercomputer/llama-2-70b-chat",
             messages=messages,
@@ -41,6 +42,7 @@ def test_completion_custom_provider_model_name():
 
 def test_completion_claude():
     litellm.set_verbose = False
+    litellm.cache = None
     litellm.AnthropicConfig(max_tokens_to_sample=200, metadata={"user_id": "1224"})
     try:
         # test without max tokens
@@ -227,6 +227,7 @@ def streaming_format_tests(idx, chunk):
 
 def test_completion_cohere_stream_bad_key():
     try:
+        litellm.cache = None
         api_key = "bad-key"
         messages = [
             {"role": "system", "content": "You are a helpful assistant."},
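For comparison, the same isolation could be centralized instead of being repeated in each test body, for example with an autouse pytest fixture in conftest.py. This is a hypothetical alternative sketch, not what the commit does.

# Hypothetical alternative (not part of this commit): reset the cache once
# per test from conftest.py rather than inside each test function.
import litellm
import pytest


@pytest.fixture(autouse=True)
def reset_litellm_cache():
    litellm.cache = None
    yield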