feat(main.py): support router.chat.completions.create

allows using router with instructor

https://github.com/BerriAI/litellm/issues/2673
This commit is contained in:
Krrish Dholakia 2024-03-25 08:26:28 -07:00
parent 9e9de7f6e2
commit f98aead602
3 changed files with 91 additions and 42 deletions

View file

@ -116,24 +116,54 @@ class LiteLLM:
default_headers: Optional[Mapping[str, str]] = None,
):
self.params = locals()
self.chat = Chat(self.params)
self.chat = Chat(self.params, router_obj=None)
class Chat:
    """Namespace object exposing ``.completions`` on the LiteLLM client.

    Selects a sync or async completions implementation based on the
    ``acompletion`` flag carried in ``params``, and threads an optional
    router object through so requests can go via a litellm Router
    (e.g. for use with instructor).
    """

    def __init__(self, params, router_obj: Optional[Any]):
        self.params = params
        # ``acompletion`` is a control flag for this client, not a kwarg for
        # the underlying call — pop it before it can be forwarded downstream.
        if self.params.get("acompletion", False):
            self.params.pop("acompletion")
            self.completions: Union[AsyncCompletions, Completions] = AsyncCompletions(
                self.params, router_obj=router_obj
            )
        else:
            self.completions = Completions(self.params, router_obj=router_obj)
class Completions:
    """Synchronous ``chat.completions`` interface.

    Routes the request through ``router_obj`` when one is supplied,
    otherwise falls back to the module-level ``completion`` function.
    """

    def __init__(self, params, router_obj: Optional[Any]):
        # Default call parameters captured by the parent LiteLLM client.
        self.params = params
        # Optional litellm Router; None means call ``completion`` directly.
        self.router_obj = router_obj

    def create(self, messages, model=None, **kwargs):
        """Create a chat completion.

        Extra ``kwargs`` are merged into (and persist in) ``self.params``.
        ``model`` falls back to ``params["model"]`` when not given explicitly.
        """
        self.params.update(kwargs)
        model = model or self.params.get("model")
        # Exclude "model" from the spread: it is already passed as an explicit
        # keyword below, and leaving it in params would raise
        # ``TypeError: got multiple values for keyword argument 'model'``.
        call_params = {k: v for k, v in self.params.items() if k != "model"}
        if self.router_obj is not None:
            response = self.router_obj.completion(
                model=model, messages=messages, **call_params
            )
        else:
            response = completion(model=model, messages=messages, **call_params)
        return response
class AsyncCompletions:
    """Asynchronous ``chat.completions`` interface.

    Routes the request through ``router_obj.acompletion`` when a router is
    supplied, otherwise falls back to the module-level ``acompletion``
    function.
    """

    def __init__(self, params, router_obj: Optional[Any]):
        # Default call parameters captured by the parent LiteLLM client.
        self.params = params
        # Optional litellm Router; None means call ``acompletion`` directly.
        self.router_obj = router_obj

    async def create(self, messages, model=None, **kwargs):
        """Create a chat completion asynchronously.

        Extra ``kwargs`` are merged into (and persist in) ``self.params``.
        ``model`` falls back to ``params["model"]`` when not given explicitly.
        """
        self.params.update(kwargs)
        model = model or self.params.get("model")
        # Exclude "model" from the spread: it is already passed as an explicit
        # keyword below, and leaving it in params would raise
        # ``TypeError: got multiple values for keyword argument 'model'``.
        call_params = {k: v for k, v in self.params.items() if k != "model"}
        if self.router_obj is not None:
            response = await self.router_obj.acompletion(
                model=model, messages=messages, **call_params
            )
        else:
            response = await acompletion(model=model, messages=messages, **call_params)
        return response