mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 18:24:20 +00:00
updates
This commit is contained in:
parent
fbef73d043
commit
711ec580c6
2 changed files with 10 additions and 9 deletions
1
code_gen_proxy
Submodule
1
code_gen_proxy
Submodule
|
@ -0,0 +1 @@
|
|||
Subproject commit 33789ae2701f663b6263bda9aabf1a50ac27fa9d
|
|
@ -79,15 +79,15 @@ def test_completion_claude_stream():
|
|||
# except Exception as e:
|
||||
# pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
# def test_completion_hf_deployed_api():
|
||||
# try:
|
||||
# user_message = "There's a llama in my garden 😱 What should I do?"
|
||||
# messages = [{ "content": user_message,"role": "user"}]
|
||||
# response = completion(model="meta-llama/Llama-2-7b-chat-hf", messages=messages, custom_llm_provider="huggingface", custom_api_base="https://s7c7gytn18vnu4tw.us-east-1.aws.endpoints.huggingface.cloud", logger_fn=logger_fn)
|
||||
# # Add any assertions here to check the response
|
||||
# print(response)
|
||||
# except Exception as e:
|
||||
# pytest.fail(f"Error occurred: {e}")
|
||||
def test_completion_hf_deployed_api():
    """Smoke-test a Hugging Face Inference Endpoint deployment of Llama-2-7b-chat.

    Sends one user message through litellm's ``completion`` with
    ``custom_llm_provider="huggingface"`` pointed at a dedicated endpoint, and
    fails the test if the call raises. The response is only printed — no
    content assertions are made here.
    """
    try:
        user_message = "There's a llama in my garden 😱 What should I do?"
        messages = [{ "content": user_message,"role": "user"}]
        response = completion(
            model="meta-llama/Llama-2-7b-chat-hf",
            messages=messages,
            custom_llm_provider="huggingface",
            custom_api_base="https://s7c7gytn18vnu4tw.us-east-1.aws.endpoints.huggingface.cloud",
            logger_fn=logger_fn,
        )
        # Add any assertions here to check the response
        print(response)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
def test_completion_cohere():
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue