Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 03:34:10 +00:00)
fix(main.py): fix together ai text completion call
This commit is contained in:
parent 59080431b8
commit a854824c02
3 changed files with 29 additions and 1 deletion
@@ -3990,6 +3990,26 @@ def test_async_text_completion():
     asyncio.run(test_get_response())
 
 
+def test_async_text_completion_together_ai():
+    litellm.set_verbose = True
+    print("test_async_text_completion")
+
+    async def test_get_response():
+        try:
+            response = await litellm.atext_completion(
+                model="together_ai/codellama/CodeLlama-13b-Instruct-hf",
+                prompt="good morning",
+                max_tokens=10,
+            )
+            print(f"response: {response}")
+        except litellm.Timeout as e:
+            print(e)
+        except Exception as e:
+            pytest.fail("An unexpected error occurred")
+
+    asyncio.run(test_get_response())
+
+
 # test_async_text_completion()
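For context, below is a minimal standalone sketch of the call path the new test exercises: an async Together AI text completion via litellm.atext_completion. The model, prompt, and max_tokens values are taken from the diff above; the script scaffolding and the assumption that a TOGETHERAI_API_KEY environment variable is set are illustrative and not part of the commit.

# Minimal sketch (assumption: TOGETHERAI_API_KEY is configured in the
# environment). Mirrors the parameters used by the test added in this commit.
import asyncio

import litellm


async def main():
    # Async text completion against Together AI, same arguments as the new test.
    response = await litellm.atext_completion(
        model="together_ai/codellama/CodeLlama-13b-Instruct-hf",
        prompt="good morning",
        max_tokens=10,
    )
    print(response)


if __name__ == "__main__":
    asyncio.run(main())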