mirror of https://github.com/BerriAI/litellm.git
fix(router.py): fixing embedding call
This commit is contained in:
parent f41683011a
commit 9185d57f6d
1 changed file with 0 additions and 2 deletions
router.py
@@ -241,7 +241,6 @@ class Router:
                   is_async: Optional[bool] = False,
                   **kwargs) -> Union[List[float], None]:
         # pick the one that is available (lowest TPM/RPM)
-        kwargs.setdefault("metadata", {}).update({"model_group": model})
         deployment = self.get_available_deployment(model=model, input=input)

         data = deployment["litellm_params"]
@@ -257,7 +256,6 @@ class Router:
                   is_async: Optional[bool] = True,
                   **kwargs) -> Union[List[float], None]:
         # pick the one that is available (lowest TPM/RPM)
-        kwargs.setdefault("metadata", {}).update({"model_group": model})
         deployment = self.get_available_deployment(model=model, input=input)

         data = deployment["litellm_params"]
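For context, a minimal sketch of how the patched embedding path might be exercised through the router. This is illustrative only: the model names, the model_list contents, and the exact response handling below are assumptions, not taken from this commit; only Router, its embedding method, and the "litellm_params" key are visible in the diff above.

    from litellm import Router

    # Hypothetical deployment list; the field values are placeholders.
    model_list = [
        {
            "model_name": "embed-model",          # group name that callers pass as `model`
            "litellm_params": {                   # forwarded to the underlying embedding call
                "model": "text-embedding-ada-002",
            },
        },
    ]

    router = Router(model_list=model_list)

    # The router picks an available deployment (lowest TPM/RPM) and forwards
    # deployment["litellm_params"] together with the input to the embedding call.
    response = router.embedding(model="embed-model", input=["hello world"])
    print(response)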