test(test_proxy_server.py): add testing for sagemaker embeddings to proxy server tests

Krrish Dholakia 2023-12-14 14:38:27 -08:00
parent 51d62189f1
commit 03dd7bff13
2 changed files with 22 additions and 0 deletions

@@ -76,3 +76,6 @@ model_list:
  - model_name: amazon-embeddings
    litellm_params:
      model: "bedrock/amazon.titan-embed-text-v1"
  - model_name: "GPT-J 6B - Sagemaker Text Embedding (Internal)"
    litellm_params:
      model: "sagemaker/berri-benchmarking-gpt-j-6b-fp16"

@@ -140,6 +140,25 @@ def test_bedrock_embedding(client_no_auth):
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception - {str(e)}")


def test_sagemaker_embedding(client_no_auth):
    global headers
    from litellm.proxy.proxy_server import user_custom_auth

    try:
        test_data = {
            "model": "GPT-J 6B - Sagemaker Text Embedding (Internal)",
            "input": ["good morning from litellm"],
        }

        response = client_no_auth.post("/v1/embeddings", json=test_data)

        assert response.status_code == 200
        result = response.json()
        print(len(result["data"][0]["embedding"]))
        assert len(result["data"][0]["embedding"]) > 10  # this usually has len==1536, so >10 confirms a real embedding vector
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception - {str(e)}")


# Run the test
# test_embedding()
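To run just the new test locally, one option is to invoke pytest programmatically. A sketch, where the path to test_proxy_server.py is an assumption about the repo layout:

import pytest

# Assumed file location; point this at wherever test_proxy_server.py lives in your checkout.
pytest.main(["litellm/tests/test_proxy_server.py", "-k", "test_sagemaker_embedding", "-s"])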