forked from phoenix/litellm-mirror
fixing acompletion
This commit is contained in:
parent eb475ad0be
commit 82a75a9d92
5 changed files with 28 additions and 3 deletions
Binary file not shown.
Binary file not shown.
@@ -2,6 +2,7 @@ import os, openai, cohere, replicate, sys
 from typing import Any
 from anthropic import Anthropic, HUMAN_PROMPT, AI_PROMPT
 import traceback
+from functools import partial
 import dotenv
 import traceback
 import litellm
@@ -261,8 +262,11 @@ def completion(
 async def acompletion(*args, **kwargs):
     loop = asyncio.get_event_loop()

-    # Call the synchronous function using run_in_executor()
-    return loop.run_in_executor(None, completion, *args, **kwargs)
+    # Use a partial function to pass your keyword arguments
+    func = partial(completion, *args, **kwargs)
+
+    # Call the synchronous function using run_in_executor
+    return await loop.run_in_executor(None, func)

 ### EMBEDDING ENDPOINTS ####################
 @client
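The hunk above is the substance of the fix: loop.run_in_executor() only forwards positional arguments to the target callable, so the old return loop.run_in_executor(None, completion, *args, **kwargs) raised a TypeError as soon as keyword arguments like model= or messages= were used, and it also returned the executor's future instead of the completed result. Binding the arguments with functools.partial and awaiting the executor call fixes both. A minimal, self-contained sketch of the same pattern, using a hypothetical blocking_completion() as a stand-in for litellm's synchronous completion():

import asyncio
from functools import partial

def blocking_completion(model, messages=None):
    # Hypothetical stand-in for the synchronous completion() call.
    return {"model": model, "echo": messages}

async def acompletion_sketch(*args, **kwargs):
    loop = asyncio.get_event_loop()
    # run_in_executor() takes only positional args, so bind kwargs up front.
    func = partial(blocking_completion, *args, **kwargs)
    # Run the blocking call in the default thread pool and await its result.
    return await loop.run_in_executor(None, func)

print(asyncio.run(acompletion_sketch(model="gpt-3.5-turbo",
                                     messages=[{"role": "user", "content": "hi"}])))

In the commit itself the bound callable is the real completion(); the shape is otherwise identical.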
21  litellm/tests/test_async_fn.py  Normal file
@@ -0,0 +1,21 @@
+#### What this tests ####
+# This tests the acompletion function
+
+import sys, os
+import pytest
+import traceback
+import asyncio
+sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
+from litellm import acompletion
+
+async def test_get_response():
+    user_message = "Hello, how are you?"
+    messages = [{ "content": user_message,"role": "user"}]
+    try:
+        response = await acompletion(model="gpt-3.5-turbo", messages=messages)
+    except Exception as e:
+        pytest.fail(f"error occurred: {e}")
+    return response
+
+response = asyncio.run(test_get_response())
+print(response)
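The new test drives acompletion end to end, though it awaits the call at import time via asyncio.run() and makes a live gpt-3.5-turbo request, so it needs provider API keys in the environment. For comparison, a hedged sketch of the same check written as a collected async test, assuming the pytest-asyncio plugin were installed (it is not added by this commit):

import pytest
from litellm import acompletion

@pytest.mark.asyncio
async def test_acompletion_returns_response():
    # Same live call as the committed test; requires an OpenAI API key in the env.
    messages = [{"content": "Hello, how are you?", "role": "user"}]
    response = await acompletion(model="gpt-3.5-turbo", messages=messages)
    assert response is not None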
2  setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name='litellm',
-    version='0.1.227',
+    version='0.1.228',
     description='Library to easily interface with LLM API providers',
     author='BerriAI',
     packages=[