forked from phoenix/litellm-mirror
(ci/cd) fixes
commit 9bde3ccd1d
parent ffcd6b6a63

2 changed files with 1 addition and 2 deletions
@@ -885,7 +885,7 @@ async def test_acompletion_claude2_1():
         },
         {"role": "user", "content": "Generate a 3 liner joke for me"},
     ]
-    # test without max tokens
+    # test without max-tokens
     response = await litellm.acompletion(model="claude-2.1", messages=messages)
     # Add any assertions here to check the response
     print(response)
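For context, a minimal sketch of the kind of async test this hunk touches, assuming pytest with pytest-asyncio installed and an ANTHROPIC_API_KEY in the environment; the system message below is a placeholder, not the repository's actual test body.

# A sketch, not the commit's exact test (assumptions: pytest + pytest-asyncio
# available, ANTHROPIC_API_KEY set; the system prompt is a placeholder).
import pytest
import litellm


@pytest.mark.asyncio
async def test_acompletion_claude2_1():
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},  # placeholder
        {"role": "user", "content": "Generate a 3 liner joke for me"},
    ]
    # test without max-tokens
    response = await litellm.acompletion(model="claude-2.1", messages=messages)
    # Add any assertions here to check the response
    print(response)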
@@ -84,7 +84,6 @@ model_list:
      model: text-completion-openai/gpt-3.5-turbo-instruct
litellm_settings:
  drop_params: True
  enable_preview_features: True
  # max_budget: 100
  # budget_duration: 30d
  num_retries: 5
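For readers using the SDK directly rather than the proxy, a hedged sketch of how the litellm_settings shown above can be mirrored in Python; this mapping is an illustration and is not part of the commit. It assumes an OPENAI_API_KEY is set for the example call.

# A sketch mirroring the proxy config above (assumption: OPENAI_API_KEY is set).
import litellm

# Counterpart of `drop_params: True`: drop provider-unsupported params instead of erroring.
litellm.drop_params = True

# Counterpart of `num_retries: 5` for a single call via the retry kwarg.
response = litellm.completion(
    model="gpt-3.5-turbo-instruct",
    messages=[{"role": "user", "content": "Say hi"}],
    num_retries=5,
)
print(response)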